Mirror of https://github.com/willmiao/ComfyUI-Lora-Manager.git (synced 2026-03-21 21:22:11 -03:00)

## Compare commits

246 commits
Commit SHAs:

```text
9199950b74 4c7e31687b 75e207b520 631289b75e 1b958d0a5d 35fdf9020d
45926b1dca 686ba5024d cf375c7c86 5e53d76f44 7757f72859 c8cc584049
2cdd269bba d2d97ae5bb d08d77c555 92f8d2139a 50f2c2dfe6 3539c453d3
1631122f95 8fcb979544 8a5af0b7f3 cb1f08d556 1150267765 5c1252548d
3c7cdf5db8 9ac4203b1c d0800510db f8ba551cc4 413444500e e21d5835ec
f2f354e478 b195d4569c 3b77fed72d fc64e97f92 1da0434454 cf2fe40612
8f46433ff7 f3be3ae269 cfec5447d3 2d36b461cf 5e23e4b13d badae2e8b3
9e64531de6 fdec8d283c 9abedbf7cb 66004c1cdc 5b564cd8a3 2e79970e6e
67c82ba6ea 98425f37b8 9d22dd3465 837138db49 d43d992362 16b611cb7e
8dde2d5e0d 22b0b2bd24 056f727bfd 0aa6c53c1f d9b0660611 d01666f4e2
51bee87cd0 3041b443e5 d95e6c939b fd38c63b35 b69c24ae14 65a0c00e33
b12a5ef133 9e1b92c26e 3922aec36e 41cca8e56d 2d37a7341a 40e3c6134c
edddd47a1e 4ea6f38645 40d998a026 3af8f151ac e066fa6873 6bd94269d4
c90edec18a cbb302614c c54611a11b 88f249649a fe9fbdb93c 28bc966b76
77bbf85b52 3b1990e97a 375b5a49f3 392c157cb5 6f5bf4b582 2e3f48ebb7
e4a2c518bb f19fb68b4c 9121c12a2c d0fe28cfe2 656e3e43be c2c1772371
88d5caf642 1684978693 8e4927600f 4d72dc57e7 e7316b3389 e17b374606
141f83065f 6381dbafc1 fc9db4510f 66abf736c9 af713470c1 93a51d2bcb
3f3e06de8a 7315aac9d8 d933308a6f 3baf93dcc5 6ba14bd8fe 7499570766
003ee55a75 b0cc42ef1f 23679ec3f5 da52e5b9dd c4e357793f 6c3424029c
dd9e6a5b69 095320ef72 35f7674bcd 26b36c123d c85e694c1d ec05282db6
3d6f9b226f eda6df4a5d d504f89f6a 14c468f2a2 2a99b0e46f ae8914f5c8
0c9f8971ce d7a75ea4e5 3ad8d8b17c 39225dc204 4fb69f7d89 0890c6ad24
dd81809589 f0672beb46 cc5301e710 9d5ec43c4e 6d41211b07 d58b61eed5
4b53d98bfc f51f354e48 59d027181d 0d0988c090 dc2de50924 12c88835f2
6f4453aaf3 4b4b8fe3c1 49e7c2e9f5 4653c273e3 ae145de2f2 dde7cf71c6
219cd242db e5b712c082 4d2c60d59b 1d2c1b114b 2bde936d05 cd3e32bf4b
454536d631 656f1755fd 8aa76ce5c1 49fa37f00d 9f83548cf3 6054d95e85
8c9bb35824 3eacf9558a fee37172b4 e128c80eb1 5cc735ed57 43fcce6361
49b7126278 679cfb5c69 50616bc680 aaad270822 bd10280736 d477050239
85f79cd8d1 613cd81152 e0aba6c49a d78bcf2494 f7cffd2eba 0d0b91aa80
42872e6d2d b91f06405d dac4c688d6 097a68ad18 4a98710db0 d033a374dd
6aa23fe36a 3220cfb79c b92e7aa446 c3b9c73541 81c6672880 08baf884d3
1c4096f3d5 66a3f3f59a 624df1328b c063854b51 8cf99dd928 c07e885725
21772feadd 2d00cfdd31 49e03d658b fec85bcc08 0e93a6bcb0 7e20f738fb
24090e6077 1022b07f64 4faf912c6f 56e4b24b07 12295d2fdc 6261f7d18d
9e1a2e3bb7 40cbb2155c a8d7070832 ab7266f3a4 3053b13fcb f3544b3471
1610048974 fc6f1bf95b 67b274c1b2 fb0d6b5641 d30fbeb286 46e430ebbb
bc4cd45fcb bdc86ddf15 ded17c1479 933e2fc01d 1cddeee264 183c000080
a3c28c1003 f4b7c9a138 6b860b5f29 37dfcd6abd bc2fca3a4f f8ef159656
b2b8a9d37e 15ae4031b7 688976ce3b a548af01dc 0dd52eceb3 b8c6cf4ac1
```
`.github/workflows/backend-tests.yml` (new file, 69 lines, vendored)

```yaml
name: Backend Tests

on:
  push:
    branches:
      - main
      - master
    paths:
      - 'py/**'
      - 'standalone.py'
      - 'tests/**'
      - 'requirements.txt'
      - 'requirements-dev.txt'
      - 'pyproject.toml'
      - 'pytest.ini'
      - '.github/workflows/backend-tests.yml'
  pull_request:
    paths:
      - 'py/**'
      - 'standalone.py'
      - 'tests/**'
      - 'requirements.txt'
      - 'requirements-dev.txt'
      - 'pyproject.toml'
      - 'pytest.ini'
      - '.github/workflows/backend-tests.yml'

jobs:
  pytest:
    name: Run pytest with coverage
    runs-on: ubuntu-latest
    steps:
      - name: Check out repository
        uses: actions/checkout@v4

      - name: Set up Python 3.11
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'
          cache: 'pip'
          cache-dependency-path: |
            requirements.txt
            requirements-dev.txt

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements-dev.txt

      - name: Run pytest with coverage
        env:
          COVERAGE_FILE: coverage/backend/.coverage
        run: |
          mkdir -p coverage/backend
          python -m pytest \
            --cov=py \
            --cov=standalone \
            --cov-report=term-missing \
            --cov-report=xml:coverage/backend/coverage.xml \
            --cov-report=html:coverage/backend/html \
            --cov-report=json:coverage/backend/coverage.json

      - name: Upload coverage artifact
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: backend-coverage
          path: coverage/backend
          if-no-files-found: warn
```
`.github/workflows/frontend-tests.yml` (new file, 52 lines, vendored)

```yaml
name: Frontend Tests

on:
  push:
    branches:
      - main
      - master
    paths:
      - 'package.json'
      - 'package-lock.json'
      - 'vitest.config.js'
      - 'tests/frontend/**'
      - 'static/js/**'
      - 'scripts/run_frontend_coverage.js'
      - '.github/workflows/frontend-tests.yml'
  pull_request:
    paths:
      - 'package.json'
      - 'package-lock.json'
      - 'vitest.config.js'
      - 'tests/frontend/**'
      - 'static/js/**'
      - 'scripts/run_frontend_coverage.js'
      - '.github/workflows/frontend-tests.yml'

jobs:
  vitest:
    name: Run Vitest with coverage
    runs-on: ubuntu-latest
    steps:
      - name: Check out repository
        uses: actions/checkout@v4

      - name: Use Node.js 20
        uses: actions/setup-node@v4
        with:
          node-version: 20
          cache: 'npm'

      - name: Install dependencies
        run: npm ci

      - name: Run frontend tests with coverage
        run: npm run test:coverage

      - name: Upload coverage artifact
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: frontend-coverage
          path: coverage/frontend
          if-no-files-found: warn
```
`.gitignore` (vendored, 3 lines added)

```diff
@@ -6,3 +6,6 @@ py/run_test.py
 .vscode/
 cache/
 civitai/
+node_modules/
+coverage/
+.coverage
```
`AGENTS.md` (new file, 22 lines)

# Repository Guidelines

## Project Structure & Module Organization

ComfyUI LoRA Manager pairs a Python backend with browser-side widgets. Backend modules live in `py/` with HTTP entry points in `py/routes/`, feature logic in `py/services/`, shared helpers in `py/utils/`, and custom nodes in `py/nodes/`. UI scripts extend ComfyUI from `web/comfyui/`, while deploy-ready assets remain in `static/` and `templates/`. Localization files live in `locales/`, example workflows in `example_workflows/`, and interim tests such as `test_i18n.py` sit beside their source until a dedicated `tests/` tree lands.

## Build, Test, and Development Commands

- `pip install -r requirements.txt` installs backend dependencies.
- `python standalone.py --port 8188` launches the standalone server for iterative development.
- `python -m pytest test_i18n.py` runs the current regression suite; target new files explicitly, e.g. `python -m pytest tests/test_recipes.py`.
- `python scripts/sync_translation_keys.py` synchronizes locale keys after UI string updates.

## Coding Style & Naming Conventions

Follow PEP 8 with four-space indentation and descriptive snake_case file and function names such as `settings_manager.py`. Classes stay PascalCase, constants in UPPER_SNAKE_CASE, and loggers are retrieved via `logging.getLogger(__name__)`. Prefer explicit type hints and docstrings on public APIs. JavaScript under `web/comfyui/` uses ES modules with camelCase helpers and the `_widget.js` suffix for UI components.

## Testing Guidelines

Pytest powers backend tests. Name modules `test_<feature>.py` and keep them near the code or in a future `tests/` package. Mock ComfyUI dependencies through helpers in `standalone.py`, keep filesystem fixtures deterministic, and ensure translations are covered. Run `python -m pytest` before submitting changes.

## Commit & Pull Request Guidelines

Commits follow the conventional format, e.g. `feat(settings): add default model path`, and should stay focused on a single concern. Pull requests must outline the problem, summarize the solution, list manual verification steps (server run, targeted pytest), and link related issues. Include screenshots or GIFs for UI or locale updates and call out migration steps such as `settings.json` adjustments.

## Configuration & Localization Tips

Copy `settings.json.example` to `settings.json` and adapt model directories before running the standalone server. Store reference assets in `civitai/` or `docs/` to keep runtime directories deploy-ready. Whenever UI text changes, update every `locales/<lang>.json` file and rerun the translation sync script so ComfyUI surfaces localized strings.
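The testing guidelines above stay abstract, so here is a minimal sketch of what a conforming test module could look like. It assumes a hypothetical `SettingsManager` class in `py/services/settings_manager.py` with a path-taking constructor and a `get` method; those names are illustrative, not taken from the repository.

```python
# tests/test_settings_manager.py - illustrative sketch; class and method names are assumptions.
import json

import pytest

from py.services.settings_manager import SettingsManager  # hypothetical import


@pytest.fixture
def settings_file(tmp_path):
    """Deterministic filesystem fixture: a throwaway settings.json per test."""
    path = tmp_path / "settings.json"
    path.write_text(json.dumps({"default_loras_root": str(tmp_path / "loras")}))
    return path


def test_loads_default_loras_root(settings_file):
    manager = SettingsManager(settings_file)  # assumed constructor signature
    assert manager.get("default_loras_root").endswith("loras")
```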
`README.md` (41 changes)

````diff
@@ -34,6 +34,14 @@ Enhance your Civitai browsing experience with our companion browser extension! S
 ## Release Notes
 
+### v0.9.6
+* **Critical Performance Optimization** - Introduced persistent model cache that dramatically accelerates initialization after startup and significantly reduces Python backend memory footprint for improved application performance.
+* **Cross-Browser Settings Synchronization** - Migrated nearly all settings to the backend, ensuring your preferences sync automatically across all browsers for a seamless multi-browser experience.
+* **Protected User Settings Location** - Relocated user settings (settings.json) to the user config directory (accessible via the link icon in Settings), preventing accidental deletion during reinstalls or updates.
+* **Global Context Menu** - Added a new global context menu accessible by right-clicking on empty page areas, providing quick access to global operations with more features coming in future updates.
+* **Multi-Library Support** - Introduced support for managing multiple libraries, allowing you to easily switch between different model collections (advanced usage, documentation in progress).
+* **Bug Fixes & Stability Improvements** - Various bug fixes and enhancements for improved stability and reliability.
+
 ### v0.9.3
 * **Metadata Archive Database Support** - Added the ability to download and utilize a metadata archive database, enabling access to metadata for models that have been deleted from CivitAI.
 * **App-Level Proxy Settings** - Introduced support for configuring a global proxy within the application, making it easier to use the manager behind network restrictions.
@@ -141,7 +149,7 @@
 1. Download the [Portable Package](https://github.com/willmiao/ComfyUI-Lora-Manager/releases/download/v0.9.2/lora_manager_portable.7z)
 2. Copy the provided `settings.json.example` file to create a new file named `settings.json` in `comfyui-lora-manager` folder
-3. Edit `settings.json` to include your correct model folder paths and CivitAI API key
+3. Edit the new `settings.json` to include your correct model folder paths and CivitAI API key
 4. Run run.bat
    - To change the startup port, edit `run.bat` and modify the parameter (e.g. `--port 9001`)
 
@@ -209,7 +217,7 @@
 You can now run LoRA Manager independently from ComfyUI:
 
 1. **For ComfyUI users**:
-   - Launch ComfyUI with LoRA Manager at least once to initialize the necessary path information in the `settings.json` file.
+   - Launch ComfyUI with LoRA Manager at least once to initialize the necessary path information in the `settings.json` file located in your user settings folder (see paths above).
    - Make sure dependencies are installed: `pip install -r requirements.txt`
    - From your ComfyUI root directory, run:
    ```bash
@@ -231,8 +239,37 @@
    ```
    - Access the interface through your browser at: `http://localhost:8188/loras`
 
+> **Note:** Existing installations automatically migrate the legacy `settings.json` from the plugin folder to the user settings directory the first time you launch this version.
+
 This standalone mode provides a lightweight option for managing your model and recipe collection without needing to run the full ComfyUI environment, making it useful even for users who primarily use other stable diffusion interfaces.
 
+## Testing & Coverage
+
+### Backend
+
+Install the development dependencies and run pytest with coverage reports:
+
+```bash
+pip install -r requirements-dev.txt
+COVERAGE_FILE=coverage/backend/.coverage pytest \
+  --cov=py \
+  --cov=standalone \
+  --cov-report=term-missing \
+  --cov-report=html:coverage/backend/html \
+  --cov-report=xml:coverage/backend/coverage.xml \
+  --cov-report=json:coverage/backend/coverage.json
+```
+
+HTML, XML, and JSON artifacts are stored under `coverage/backend/` so you can inspect hot spots locally or from CI artifacts.
+
+### Frontend
+
+Run the Vitest coverage suite to analyze widget hot spots:
+
+```bash
+npm run test:coverage
+```
+
 ---
 
 ## Contributing
````
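The JSON report added above lends itself to quick scripting. Here is a small sketch that ranks the least-covered backend files, assuming coverage.py's standard JSON report schema (a top-level `files` mapping whose entries carry a `summary` block with `percent_covered`):

```python
# rank_coverage.py - list the ten least-covered backend files from the JSON report.
import json
from pathlib import Path

report = json.loads(Path("coverage/backend/coverage.json").read_text())

# coverage.py's JSON report keys files by path; each entry has a "summary" block.
worst = sorted(
    report["files"].items(),
    key=lambda item: item[1]["summary"]["percent_covered"],
)

for filename, data in worst[:10]:
    summary = data["summary"]
    print(f"{summary['percent_covered']:6.1f}%  {filename} "
          f"({summary['num_statements']} statements)")
```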
`__init__.py` (39 changes)

```diff
@@ -1,13 +1,32 @@
-from .py.lora_manager import LoraManager
-from .py.nodes.lora_loader import LoraManagerLoader, LoraManagerTextLoader
-from .py.nodes.trigger_word_toggle import TriggerWordToggle
-from .py.nodes.lora_stacker import LoraStacker
-from .py.nodes.save_image import SaveImage
-from .py.nodes.debug_metadata import DebugMetadata
-from .py.nodes.wanvideo_lora_select import WanVideoLoraSelect
-from .py.nodes.wanvideo_lora_select_from_text import WanVideoLoraSelectFromText
-# Import metadata collector to install hooks on startup
-from .py.metadata_collector import init as init_metadata_collector
+try:  # pragma: no cover - import fallback for pytest collection
+    from .py.lora_manager import LoraManager
+    from .py.nodes.lora_loader import LoraManagerLoader, LoraManagerTextLoader
+    from .py.nodes.trigger_word_toggle import TriggerWordToggle
+    from .py.nodes.lora_stacker import LoraStacker
+    from .py.nodes.save_image import SaveImage
+    from .py.nodes.debug_metadata import DebugMetadata
+    from .py.nodes.wanvideo_lora_select import WanVideoLoraSelect
+    from .py.nodes.wanvideo_lora_select_from_text import WanVideoLoraSelectFromText
+    from .py.metadata_collector import init as init_metadata_collector
+except ImportError:  # pragma: no cover - allows running under pytest without package install
+    import importlib
+    import pathlib
+    import sys
+
+    package_root = pathlib.Path(__file__).resolve().parent
+    if str(package_root) not in sys.path:
+        sys.path.append(str(package_root))
+
+    LoraManager = importlib.import_module("py.lora_manager").LoraManager
+    LoraManagerLoader = importlib.import_module("py.nodes.lora_loader").LoraManagerLoader
+    LoraManagerTextLoader = importlib.import_module("py.nodes.lora_loader").LoraManagerTextLoader
+    TriggerWordToggle = importlib.import_module("py.nodes.trigger_word_toggle").TriggerWordToggle
+    LoraStacker = importlib.import_module("py.nodes.lora_stacker").LoraStacker
+    SaveImage = importlib.import_module("py.nodes.save_image").SaveImage
+    DebugMetadata = importlib.import_module("py.nodes.debug_metadata").DebugMetadata
+    WanVideoLoraSelect = importlib.import_module("py.nodes.wanvideo_lora_select").WanVideoLoraSelect
+    WanVideoLoraSelectFromText = importlib.import_module("py.nodes.wanvideo_lora_select_from_text").WanVideoLoraSelectFromText
+    init_metadata_collector = importlib.import_module("py.metadata_collector").init
 
 NODE_CLASS_MAPPINGS = {
     LoraManagerLoader.NAME: LoraManagerLoader,
```
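The fallback exists because relative imports such as `from .py.lora_manager import ...` fail when the file is loaded as a loose top-level module (as happens during pytest collection) rather than as an installed package. The same pattern in miniature, with hypothetical module names:

```python
# fallback_import_demo.py - the pattern above in isolation; "subpkg.widget" is hypothetical.
import importlib
import pathlib
import sys

try:
    # Works when this file is imported as part of a package.
    from .subpkg.widget import Widget  # noqa: F401
except ImportError:
    # Loaded as a top-level module: put our own directory on sys.path
    # and import the same object by its absolute module name instead.
    package_root = pathlib.Path(__file__).resolve().parent
    if str(package_root) not in sys.path:
        sys.path.append(str(package_root))
    Widget = importlib.import_module("subpkg.widget").Widget
```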
**Deleted file** (182 lines):

# Event Management Implementation Summary

## What Has Been Implemented

### 1. Enhanced EventManager Class
- **Location**: `static/js/utils/EventManager.js`
- **Features**:
  - Priority-based event handling
  - Conditional execution based on application state
  - Element filtering (target/exclude selectors)
  - Mouse button filtering
  - Automatic cleanup with cleanup functions
  - State tracking for app modes
  - Error handling for event handlers

### 2. BulkManager Integration
- **Location**: `static/js/managers/BulkManager.js`
- **Migrated Events**:
  - Global keyboard shortcuts (Ctrl+A, Escape, B key)
  - Marquee selection events (mousedown, mousemove, mouseup, contextmenu)
  - State synchronization with EventManager
- **Benefits**:
  - Centralized priority handling
  - Conditional execution based on modal state
  - Better coordination with other components

### 3. UIHelpers Integration
- **Location**: `static/js/utils/uiHelpers.js`
- **Migrated Events**:
  - Mouse position tracking for node selector positioning
  - Node selector click events (outside clicks and selection)
  - State management for node selector
- **Benefits**:
  - Reduced direct DOM listeners
  - Coordinated state tracking
  - Better cleanup

### 4. ModelCard Integration
- **Location**: `static/js/components/shared/ModelCard.js`
- **Migrated Events**:
  - Model card click delegation
  - Action button handling (star, globe, copy, etc.)
  - Better return value handling for event propagation
- **Benefits**:
  - Single event listener for all model cards
  - Priority-based execution
  - Better event flow control

### 5. Documentation and Initialization
- **EventManagerDocs.md**: Comprehensive documentation
- **eventManagementInit.js**: Initialization and global handlers
- **Features**:
  - Global escape key handling
  - Modal state synchronization
  - Error handling
  - Analytics integration points
  - Cleanup on page unload

## Application States Tracked

1. **bulkMode**: When bulk selection mode is active
2. **marqueeActive**: When marquee selection is in progress
3. **modalOpen**: When any modal dialog is open
4. **nodeSelectorActive**: When node selector popup is visible

## Priority Levels Used

- **250+**: Critical system events (escape keys)
- **200+**: High priority system events (modal close)
- **100-199**: Application-level shortcuts (bulk operations)
- **80-99**: UI interactions (marquee selection)
- **60-79**: Component interactions (model cards)
- **10-49**: Tracking and monitoring
- **1-9**: Analytics and low-priority tasks

## Event Flow Examples

### Bulk Mode Toggle (B key)
1. **Priority 100**: BulkManager keyboard handler catches 'b' key
2. Toggles bulk mode state
3. Updates EventManager state
4. Updates UI accordingly
5. Stops propagation (returns true)

### Marquee Selection
1. **Priority 80**: BulkManager mousedown handler (only in .models-container, excluding cards/buttons)
2. Starts marquee selection
3. **Priority 90**: BulkManager mousemove handler (only when marquee active)
4. Updates selection rectangle
5. **Priority 90**: BulkManager mouseup handler ends selection

### Model Card Click
1. **Priority 60**: ModelCard delegation handler checks for specific elements
2. If action button: handles action and stops propagation
3. If general card click: continues to other handlers
4. Bulk selection may also handle the event if in bulk mode

## Remaining Event Listeners (Not Yet Migrated)

### High Priority for Migration
1. **SearchManager keyboard events** - Global search shortcuts
2. **ModalManager escape handling** - Already integrated with initialization
3. **Scroll-based events** - Back to top, virtual scrolling
4. **Resize events** - Panel positioning, responsive layouts

### Medium Priority
1. **Form input events** - Tag inputs, settings forms
2. **Component-specific events** - Recipe modal, showcase view
3. **Sidebar events** - Resize handling, toggle events

### Low Priority (Can Remain As-Is)
1. **VirtualScroller events** - Performance-critical, specialized
2. **Component lifecycle events** - Modal open/close callbacks
3. **One-time setup events** - Theme initialization, etc.

## Benefits Achieved

### Performance Improvements
- **Reduced DOM listeners**: From ~15+ individual listeners to ~5 coordinated handlers
- **Conditional execution**: Handlers only run when conditions are met
- **Priority ordering**: Important events handled first
- **Better memory management**: Automatic cleanup prevents leaks

### Coordination Improvements
- **State synchronization**: All components aware of app state
- **Event flow control**: Proper propagation stopping
- **Conflict resolution**: Priority system prevents conflicts
- **Debugging**: Centralized event handling for easier debugging

### Code Quality Improvements
- **Consistent patterns**: All event handling follows same patterns
- **Better separation of concerns**: Event logic separated from business logic
- **Error handling**: Centralized error catching and reporting
- **Documentation**: Clear patterns for future development

## Next Steps (Recommendations)

### 1. Migrate Search Events
```javascript
// In SearchManager.js
eventManager.addHandler('keydown', 'search-shortcuts', (e) => {
    if ((e.ctrlKey || e.metaKey) && e.key === 'f') {
        this.focusSearchInput();
        return true;
    }
}, { priority: 120 });
```

### 2. Integrate Resize Events
```javascript
// Create ResizeManager
eventManager.addHandler('resize', 'layout-resize', debounce((e) => {
    this.updateLayoutDimensions();
}, 250), { priority: 50 });
```

### 3. Add Debug Mode
```javascript
// In EventManager.js
if (window.DEBUG_EVENTS) {
    console.log(`Event ${eventType} handled by ${source} (priority: ${priority})`);
}
```

### 4. Create Event Analytics
```javascript
// Track event patterns for optimization
eventManager.addHandler('*', 'analytics', (e) => {
    this.trackEventUsage(e.type, performance.now());
}, { priority: 1 });
```

## Testing Recommendations

1. **Verify bulk mode interactions** work correctly
2. **Test marquee selection** in various scenarios
3. **Check modal state synchronization**
4. **Verify node selector** positioning and cleanup
5. **Test keyboard shortcuts** don't conflict
6. **Verify proper cleanup** when components are destroyed

The centralized event management system provides a solid foundation for coordinated, efficient event handling across the application while maintaining good performance and code organization.
**Deleted file** (301 lines):

# Centralized Event Management System

This document describes the centralized event management system that coordinates event handling across the ComfyUI LoRA Manager application.

## Overview

The `EventManager` class provides a centralized way to handle DOM events with priority-based execution, conditional execution based on application state, and proper cleanup mechanisms.

## Features

- **Priority-based execution**: Handlers with higher priority run first
- **Conditional execution**: Handlers can be executed based on application state
- **Element filtering**: Handlers can target specific elements or exclude others
- **Automatic cleanup**: Cleanup functions are called when handlers are removed
- **State tracking**: Tracks application states like bulk mode, modal open, etc.

## Basic Usage

### Importing

```javascript
import { eventManager } from './EventManager.js';
```

### Adding Event Handlers

```javascript
eventManager.addHandler('click', 'myComponent', (event) => {
    console.log('Button clicked!');
    return true; // Stop propagation to other handlers
}, {
    priority: 100,
    targetSelector: '.my-button',
    skipWhenModalOpen: true
});
```

### Removing Event Handlers

```javascript
// Remove specific handler
eventManager.removeHandler('click', 'myComponent');

// Remove all handlers for a component
eventManager.removeAllHandlersForSource('myComponent');
```

### Updating Application State

```javascript
// Set state
eventManager.setState('bulkMode', true);
eventManager.setState('modalOpen', true);

// Get state
const isBulkMode = eventManager.getState('bulkMode');
```

## Available States

- `bulkMode`: Whether bulk selection mode is active
- `marqueeActive`: Whether marquee selection is in progress
- `modalOpen`: Whether any modal is currently open
- `nodeSelectorActive`: Whether the node selector popup is active

## Handler Options

### Priority
Higher numbers = higher priority. Handlers run in descending priority order.

```javascript
{
    priority: 100 // High priority
}
```

### Conditional Execution

```javascript
{
    onlyInBulkMode: true, // Only run when bulk mode is active
    onlyWhenMarqueeActive: true, // Only run when marquee selection is active
    skipWhenModalOpen: true, // Skip when any modal is open
    skipWhenNodeSelectorActive: true, // Skip when node selector is active
    onlyWhenNodeSelectorActive: true // Only run when node selector is active
}
```

### Element Filtering

```javascript
{
    targetSelector: '.model-card', // Only handle events on matching elements
    excludeSelector: 'button, input', // Exclude events from these elements
    button: 0 // Only handle specific mouse button (0=left, 1=middle, 2=right)
}
```

### Cleanup Functions

```javascript
{
    cleanup: () => {
        // Custom cleanup logic
        console.log('Handler cleaned up');
    }
}
```

## Integration Examples

### BulkManager Integration

```javascript
class BulkManager {
    registerEventHandlers() {
        // High priority keyboard shortcuts
        eventManager.addHandler('keydown', 'bulkManager-keyboard', (e) => {
            return this.handleGlobalKeyboard(e);
        }, {
            priority: 100,
            skipWhenModalOpen: true
        });

        // Marquee selection
        eventManager.addHandler('mousedown', 'bulkManager-marquee-start', (e) => {
            return this.handleMarqueeStart(e);
        }, {
            priority: 80,
            skipWhenModalOpen: true,
            targetSelector: '.models-container',
            excludeSelector: '.model-card, button, input',
            button: 0
        });
    }

    cleanup() {
        eventManager.removeAllHandlersForSource('bulkManager-keyboard');
        eventManager.removeAllHandlersForSource('bulkManager-marquee-start');
    }
}
```

### Modal Integration

```javascript
class ModalManager {
    showModal(modalId) {
        // Update state when modal opens
        eventManager.setState('modalOpen', true);
        this.displayModal(modalId);
    }

    closeModal(modalId) {
        // Update state when modal closes
        eventManager.setState('modalOpen', false);
        this.hideModal(modalId);
    }
}
```

### Component Event Delegation

```javascript
export function setupComponentEvents() {
    eventManager.addHandler('click', 'myComponent-actions', (event) => {
        const button = event.target.closest('.action-button');
        if (!button) return false;

        this.handleAction(button.dataset.action);
        return true; // Stop propagation
    }, {
        priority: 60,
        targetSelector: '.component-container'
    });
}
```

## Best Practices

### 1. Use Descriptive Source Names
Use the format `componentName-purposeDescription`:
```javascript
// Good
'bulkManager-marqueeSelection'
'nodeSelector-clickOutside'
'modelCard-delegation'

// Avoid
'bulk'
'click'
'handler1'
```

### 2. Set Appropriate Priorities
- 200+: Critical system events (escape keys, critical modals)
- 100-199: High priority application events (keyboard shortcuts)
- 50-99: Normal UI interactions (buttons, cards)
- 1-49: Low priority events (tracking, analytics)

### 3. Use Conditional Execution
Instead of checking state inside handlers, use options:
```javascript
// Good
eventManager.addHandler('click', 'bulk-action', handler, {
    onlyInBulkMode: true
});

// Avoid
eventManager.addHandler('click', 'bulk-action', (e) => {
    if (!state.bulkMode) return;
    // handler logic
});
```

### 4. Clean Up Properly
Always clean up handlers when components are destroyed:
```javascript
class MyComponent {
    constructor() {
        this.registerEvents();
    }

    destroy() {
        eventManager.removeAllHandlersForSource('myComponent');
    }
}
```

### 5. Return Values Matter
- Return `true` to stop event propagation to other handlers
- Return `false` or `undefined` to continue with other handlers

## Migration Guide

### From Direct Event Listeners

**Before:**
```javascript
document.addEventListener('click', (e) => {
    if (e.target.closest('.my-button')) {
        this.handleClick(e);
    }
});
```

**After:**
```javascript
eventManager.addHandler('click', 'myComponent-button', (e) => {
    this.handleClick(e);
}, {
    targetSelector: '.my-button'
});
```

### From Event Delegation

**Before:**
```javascript
container.addEventListener('click', (e) => {
    const card = e.target.closest('.model-card');
    if (!card) return;

    if (e.target.closest('.action-btn')) {
        this.handleAction(e);
    }
});
```

**After:**
```javascript
eventManager.addHandler('click', 'container-actions', (e) => {
    const card = e.target.closest('.model-card');
    if (!card) return false;

    if (e.target.closest('.action-btn')) {
        this.handleAction(e);
        return true;
    }
}, {
    targetSelector: '.container'
});
```

## Performance Benefits

1. **Reduced DOM listeners**: Single listener per event type instead of multiple
2. **Conditional execution**: Handlers only run when conditions are met
3. **Priority ordering**: Important handlers run first, avoiding unnecessary work
4. **Automatic cleanup**: Prevents memory leaks from orphaned listeners
5. **Centralized debugging**: All event handling flows through one system

## Debugging

Enable debug logging to trace event handling:
```javascript
// Add to EventManager.js for debugging
console.log(`Handling ${eventType} event with ${handlers.length} handlers`);
```

The event manager provides a foundation for coordinated, efficient event handling across the entire application.
`docs/LM-Extension-Wiki.md` (new file, 180 lines)

## Overview

The **LoRA Manager Civitai Extension** is a browser extension designed to work seamlessly with [LoRA Manager](https://github.com/willmiao/ComfyUI-Lora-Manager) to significantly enhance your browsing experience on [Civitai](https://civitai.com). It also supports browsing on [CivArchive](https://civarchive.com/) (formerly CivitaiArchive).

With this extension, you can:

- ✅ Instantly see which models are already present in your local library
- ✅ Download new models with a single click
- ✅ Manage downloads efficiently with queue and parallel download support
- ✅ Keep your downloaded models automatically organized according to your custom settings

---

## Why Are All Features for Supporters Only?

I love building tools for the Stable Diffusion and ComfyUI communities, and LoRA Manager is a passion project that I've poured countless hours into. When I created this companion extension, my hope was to offer its core features for free, as a thank-you to all of you.

Unfortunately, I've reached a point where I need to be realistic. The level of support from the free model has been far lower than what's needed to justify the continuous development and maintenance for both projects. It was a difficult decision, but I've chosen to make the extension's features exclusive to supporters.

This change is crucial for me to be able to continue dedicating my time to improving the free and open-source LoRA Manager, which I'm committed to keeping available for everyone.

Your support does more than just unlock a few features—it allows me to keep innovating and ensures the core LoRA Manager project thrives. I'm incredibly grateful for your understanding and any support you can offer. ❤️

(_For those who previously supported me on Ko-fi with a one-time donation, I'll be sending out license keys individually as a thank-you._)

---

## Installation

### Supported Browsers & Installation Methods

| Browser | Installation Method |
|---|---|
| **Google Chrome** | [Chrome Web Store link](https://chromewebstore.google.com/detail/capigligggeijgmocnaflanlbghnamgm?utm_source=item-share-cb) |
| **Microsoft Edge** | Install via Chrome Web Store (compatible) |
| **Brave Browser** | Install via Chrome Web Store (compatible) |
| **Opera** | Install via Chrome Web Store (compatible) |
| **Firefox** | [📦 Install Firefox Extension (reviewed and verified by Mozilla)](https://github.com/willmiao/lm-civitai-extension-firefox/releases/latest/download/extension.xpi) |

For non-Chrome browsers (e.g., Microsoft Edge), you can typically install extensions from the Chrome Web Store by following these steps: open the extension's Chrome Web Store page, click 'Get extension', then click 'Allow' when prompted to enable installations from other stores, and finally click 'Add extension' to complete the installation.

---

## Privacy & Security

I understand concerns around browser extensions and privacy, and I want to be fully transparent about how the **LM Civitai Extension** works:

- **Reviewed and Verified**
  This extension has been **manually reviewed and approved by the Chrome Web Store**. The Firefox version uses the **exact same code** (only the packaging format differs) and has passed **Mozilla's Add-on review**.

- **Minimal Network Access**
  The only external server this extension connects to is **`https://willmiao.shop`**, used solely for **license validation**. It does **not collect, transmit, or store any personal or usage data**. No browsing history, no user IDs, no analytics, no hidden trackers.

- **Local-Only Model Detection**
  Model detection and LoRA Manager communication all happen **locally** within your browser, directly interacting with your local LoRA Manager backend.

I value your trust and am committed to keeping your local setup private and secure. If you have any questions, feel free to reach out!

---

## How to Use

After installing the extension, you'll automatically receive a **7-day trial** to explore all features.

When the extension is correctly installed and your license is valid:

- Open **Civitai**, and you'll see visual indicators added by the extension on model cards, showing:
  - ✅ Models already present in your local library
  - ⬇️ A download button for models not in your library

Clicking the download button adds the corresponding model version to the download queue. You can set up to **5 models to download simultaneously**.

### Visual Indicators Appear On:

- **Home Page** — Featured models
- **Models Page**
- **Creator Profiles** — If the creator has set their models to be visible
- **Recommended Resources** — On individual model pages

### Version Buttons on Model Pages

On a specific model page, visual indicators also appear on version buttons, showing which versions are already in your local library.

When switching to a specific version by clicking a version button:

- Clicking the download button will open a dropdown:
  - Download via **LoRA Manager**
  - Download via **Original Download** (browser download)

You can check **Remember my choice** to set your preferred default. You can change this setting anytime in the extension's settings.

### Resources on Image Pages (2025-08-05)

The extension now shows in-library indicators for image resources. 'Import image as recipe' is coming soon!

---

## Model Download Location & LoRA Manager Settings

To use the **one-click download function**, you must first set:

- Your **Default LoRAs Root**
- Your **Default Checkpoints Root**

These are set within LoRA Manager's settings.

When everything is configured, downloaded model files will be placed in:

`<Default_Models_Root>/<Base_Model_of_the_Model>/<First_Tag_of_the_Model>`

### Update: Default Path Customization (2025-07-21)

A new setting to customize the default download path has been added in the nightly version. You can now personalize where models are saved when downloading via the LM Civitai Extension.

The previous YAML path-mapping file will be deprecated; settings will now be unified in `settings.json` to simplify configuration.

---

## Backend Port Configuration

If your **ComfyUI** or **LoRA Manager** backend is running on a port **other than the default 8188**, you must configure the backend port in the extension's settings.

After correctly setting and saving the port, you'll see in the extension's header area:

- A **Healthy** status with the tooltip: `Connected to LoRA Manager on port xxxx`

---

## Advanced Usage

### Connecting to a Remote LoRA Manager

If your LoRA Manager is running on another computer, you can still connect from your browser using port forwarding.

> **Why can't you set a remote IP directly?**
>
> For privacy and security, the extension only requests access to `http://127.0.0.1/*`. Supporting remote IPs would require much broader permissions, which may be rejected by browser stores and could raise user concerns.

**Solution: Port Forwarding with `socat`**

On your browser computer, run:

`socat TCP-LISTEN:8188,bind=127.0.0.1,fork TCP:REMOTE.IP.ADDRESS.HERE:8188`

- Replace `REMOTE.IP.ADDRESS.HERE` with the IP of the machine running LoRA Manager.
- Adjust the port if needed.

This lets the extension connect to `127.0.0.1:8188` as usual, with traffic forwarded to your remote server.

_Thanks to user **Temikus** for sharing this solution!_

---

## Roadmap

The extension will evolve alongside **LoRA Manager** improvements. Planned features include:

- [x] Support for **additional model types** (e.g., embeddings)
- [ ] One-click **Recipe Import**
- [x] Display of in-library status for all resources in the **Resources Used** section of the image page
- [x] One-click **Auto-organize Models**

**Stay tuned — and thank you for your support!**

---
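If `socat` is not available on your system, the same local-to-remote forwarding can be done with a few lines of Python's standard library. A minimal sketch follows; `REMOTE_HOST` is a placeholder you must replace, and this is an illustration rather than an officially supported tool:

```python
# forward_8188.py - minimal TCP forwarder standing in for the socat command above.
import asyncio

REMOTE_HOST = "192.0.2.10"  # placeholder: IP of the machine running LoRA Manager
REMOTE_PORT = 8188


async def pipe(reader: asyncio.StreamReader, writer: asyncio.StreamWriter) -> None:
    try:
        while data := await reader.read(65536):
            writer.write(data)
            await writer.drain()
    finally:
        writer.close()


async def handle(client_reader, client_writer):
    # Open one upstream connection per browser connection and shuttle bytes both ways.
    remote_reader, remote_writer = await asyncio.open_connection(REMOTE_HOST, REMOTE_PORT)
    await asyncio.gather(
        pipe(client_reader, remote_writer),
        pipe(remote_reader, client_writer),
        return_exceptions=True,
    )


async def main() -> None:
    server = await asyncio.start_server(handle, "127.0.0.1", 8188)
    async with server:
        await server.serve_forever()


asyncio.run(main())
```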
`docs/architecture/example_images_routes.md` (new file, 93 lines)

# Example image route architecture

The example image routing stack mirrors the layered model route stack described in [`docs/architecture/model_routes.md`](model_routes.md). HTTP wiring, controller setup, handler orchestration, and long-running workflows now live in clearly separated modules so we can extend download/import behaviour without touching the entire feature surface.

```mermaid
graph TD
    subgraph HTTP
        A[ExampleImagesRouteRegistrar] -->|binds| B[ExampleImagesRoutes controller]
    end
    subgraph Application
        B --> C[ExampleImagesHandlerSet]
        C --> D1[Handlers]
        D1 --> E1[Use cases]
        E1 --> F1[Download manager / processor / file manager]
    end
    subgraph Side Effects
        F1 --> G1[Filesystem]
        F1 --> G2[Model metadata]
        F1 --> G3[WebSocket progress]
    end
```

## Layer responsibilities

| Layer | Module(s) | Responsibility |
| --- | --- | --- |
| Registrar | `py/routes/example_images_route_registrar.py` | Declarative catalogue of every example image endpoint plus helpers that bind them to an `aiohttp` router. Keeps HTTP concerns symmetrical with the model registrar. |
| Controller | `py/routes/example_images_routes.py` | Lazily constructs `ExampleImagesHandlerSet`, injects defaults for the download manager, processor, and file manager, and exposes the registrar-ready mapping just like `BaseModelRoutes`. |
| Handler set | `py/routes/handlers/example_images_handlers.py` | Groups HTTP adapters by concern (downloads, imports/deletes, filesystem access). Each handler translates domain errors into HTTP responses and defers to a use case or utility service. |
| Use cases | `py/services/use_cases/example_images/*.py` | Encapsulate orchestration for downloads and imports. They validate input, translate concurrency/configuration errors, and keep handler logic declarative. |
| Supporting services | `py/utils/example_images_download_manager.py`, `py/utils/example_images_processor.py`, `py/utils/example_images_file_manager.py` | Execute long-running work: pull assets from Civitai, persist uploads, clean metadata, expose filesystem actions with guardrails, and broadcast progress snapshots. |

## Handler responsibilities & invariants

`ExampleImagesHandlerSet` flattens the handler objects into the `{"handler_name": coroutine}` mapping consumed by the registrar. The table below outlines how each handler collaborates with the use cases and utilities.

| Handler | Key endpoints | Collaborators | Contracts |
| --- | --- | --- | --- |
| `ExampleImagesDownloadHandler` | `/api/lm/download-example-images`, `/api/lm/example-images-status`, `/api/lm/pause-example-images`, `/api/lm/resume-example-images`, `/api/lm/force-download-example-images` | `DownloadExampleImagesUseCase`, `DownloadManager` | Delegates payload validation and concurrency checks to the use case; progress/status endpoints expose the same snapshot used for WebSocket broadcasts; pause/resume surface `DownloadNotRunningError` as HTTP 400 instead of 500. |
| `ExampleImagesManagementHandler` | `/api/lm/import-example-images`, `/api/lm/delete-example-image` | `ImportExampleImagesUseCase`, `ExampleImagesProcessor` | Multipart uploads are streamed to disk via the use case; validation failures return HTTP 400 with no filesystem side effects; deletion funnels through the processor to prune metadata and cached images consistently. |
| `ExampleImagesFileHandler` | `/api/lm/open-example-images-folder`, `/api/lm/example-image-files`, `/api/lm/has-example-images` | `ExampleImagesFileManager` | Centralises filesystem access, enforcing settings-based root paths and returning HTTP 400/404 for missing configuration or folders; responses always include `success`/`has_images` booleans for UI consumption. |

## Use case boundaries

| Use case | Entry point | Dependencies | Guarantees |
| --- | --- | --- | --- |
| `DownloadExampleImagesUseCase` | `execute(payload)` | `DownloadManager.start_download`, download configuration errors | Raises `DownloadExampleImagesInProgressError` when the manager reports an active job, rewraps configuration errors into `DownloadExampleImagesConfigurationError`, and lets `ExampleImagesDownloadError` bubble as 500s so handlers do not duplicate logging. |
| `ImportExampleImagesUseCase` | `execute(request)` | `ExampleImagesProcessor.import_images`, temporary file helpers | Supports multipart or JSON payloads, normalises file paths into a single list, cleans up temp files even on failure, and maps validation issues to `ImportExampleImagesValidationError` for HTTP 400 responses. |
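To make the error-translation contract concrete, here is a rough sketch of how a download handler can map these use-case exceptions onto HTTP responses. The exception names come from the tables above; the handler body, helper names, and the specific status codes are illustrative assumptions rather than the repository's actual code:

```python
# Sketch only: shows the error-mapping shape, not the real handler implementation.
from aiohttp import web


class DownloadExampleImagesInProgressError(Exception):
    """Raised by the use case when a download job is already running."""


class DownloadExampleImagesConfigurationError(Exception):
    """Raised by the use case when required settings are missing or invalid."""


class ExampleImagesDownloadHandlerSketch:
    def __init__(self, use_case):
        self._use_case = use_case  # e.g. DownloadExampleImagesUseCase

    async def download_example_images(self, request: web.Request) -> web.Response:
        payload = await request.json()
        try:
            snapshot = await self._use_case.execute(payload)
        except DownloadExampleImagesInProgressError:
            # Concurrency guard: tell the client to retry later instead of failing hard.
            return web.json_response(
                {"success": False, "error": "download already in progress"}, status=409
            )
        except DownloadExampleImagesConfigurationError as exc:
            # Configuration problems are client-fixable, so report them as 400s.
            return web.json_response({"success": False, "error": str(exc)}, status=400)
        # ExampleImagesDownloadError is deliberately not caught: it bubbles up as a
        # 500 so the use case logs it exactly once (see the table above).
        return web.json_response({"success": True, "status": snapshot})
```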
|
## Maintaining critical invariants
|
||||||
|
|
||||||
|
* **Shared progress snapshots** - The download handler returns the same snapshot built by
|
||||||
|
`DownloadManager`, guaranteeing parity between HTTP polling endpoints and WebSocket
|
||||||
|
progress events.
|
||||||
|
* **Safe filesystem access** - All folder/file actions flow through
|
||||||
|
`ExampleImagesFileManager`, which validates the configured example image root and ensures
|
||||||
|
responses never leak absolute paths outside the allowed directory.
|
||||||
|
* **Metadata hygiene** - Import/delete operations run through `ExampleImagesProcessor`,
|
||||||
|
which updates model metadata via `MetadataManager` and notifies the relevant scanners so
|
||||||
|
cache state stays in sync.
|
||||||
|
|
||||||
|

## Migration notes

The refactor brings the example image stack in line with the model/recipe stacks:

1. `ExampleImagesRouteRegistrar` now owns the declarative route list. Downstream projects should rely on `ExampleImagesRoutes.to_route_mapping()` instead of manually wiring handler callables.
2. `ExampleImagesRoutes` caches its `ExampleImagesHandlerSet` just like `BaseModelRoutes`. If you previously instantiated handlers directly, inject custom collaborators via the controller constructor (`download_manager`, `processor`, `file_manager`) to keep test seams predictable.
3. Tests that mocked `ExampleImagesRoutes.setup_routes` should switch to patching `DownloadExampleImagesUseCase`/`ImportExampleImagesUseCase` at import time. The handlers expect those abstractions to surface validation/concurrency errors, and bypassing them will skip the HTTP-friendly error mapping.

## Extending the stack

1. Add the endpoint to `ROUTE_DEFINITIONS` with a unique `handler_name`.
2. Expose the coroutine on an existing handler class, or create a new handler and extend `ExampleImagesHandlerSet` (a sketch follows this list).
3. Wire additional services or factories inside `_build_handler_set` on `ExampleImagesRoutes`, mirroring how the model stack introduces new use cases.
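
As a concrete illustration of steps 1 and 2, here is a hedged sketch of a new handler coroutine; the stats endpoint, the `ROUTE_DEFINITIONS` entry shape, and the `collect_stats` helper are invented for the example and do not exist in the codebase.

```python
# Hypothetical handler demonstrating the extension recipe above; every name
# other than ROUTE_DEFINITIONS/handler_name is an assumption.
from aiohttp import web

# Step 1 (sketch): append to ROUTE_DEFINITIONS something like
#   {"method": "GET", "path": "/api/lm/example-images-stats",
#    "handler_name": "get_example_images_stats"}


class ExampleImagesStatsHandler:
    def __init__(self, file_manager):
        self._file_manager = file_manager

    async def get_example_images_stats(self, request: web.Request) -> web.Response:
        """Coroutine exposed under the `handler_name` declared in step 1."""
        try:
            stats = await self._file_manager.collect_stats()  # assumed helper
            return web.json_response({"success": True, "stats": stats})
        except FileNotFoundError as exc:
            # Mirror the stack's convention: missing folders are client errors.
            return web.json_response(
                {"success": False, "error": str(exc)}, status=404
            )
```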

`tests/routes/test_example_images_routes.py` exercises registrar binding, download pause
flows, and import validations. Use it as a template when introducing new handler
collaborators or error mappings.
|
||||||
100
docs/architecture/model_routes.md
Normal file
100
docs/architecture/model_routes.md
Normal file
@@ -0,0 +1,100 @@
|
|||||||
|

# Base model route architecture

The model routing stack now splits HTTP wiring, orchestration logic, and
business rules into discrete layers. The goal is to make it obvious where a
new collaborator should live and which contract it must honour. The diagram
below captures the end-to-end flow for a typical request:

```mermaid
graph TD
    subgraph HTTP
        A[ModelRouteRegistrar] -->|binds| B[BaseModelRoutes handler proxy]
    end
    subgraph Application
        B --> C[ModelHandlerSet]
        C --> D1[Handlers]
        D1 --> E1[Use cases]
        E1 --> F1[Services / scanners]
    end
    subgraph Side Effects
        F1 --> G1[Cache & metadata]
        F1 --> G2[Filesystem]
        F1 --> G3[WebSocket state]
    end
```

Every box maps to a concrete module:

| Layer | Module(s) | Responsibility |
| --- | --- | --- |
| Registrar | `py/routes/model_route_registrar.py` | Declarative list of routes shared by every model type and helper methods for binding them to an `aiohttp` application. |
| Route controller | `py/routes/base_model_routes.py` | Constructs the handler graph, injects shared services, exposes proxies that surface `503 Service not ready` when the model service has not been attached. |
| Handler set | `py/routes/handlers/model_handlers.py` | Thin HTTP adapters grouped by concern (page rendering, listings, mutations, queries, downloads, CivitAI integration, move operations, auto-organize). |
| Use cases | `py/services/use_cases/*.py` | Encapsulate long-running flows (`DownloadModelUseCase`, `BulkMetadataRefreshUseCase`, `AutoOrganizeUseCase`). They normalise validation errors and concurrency constraints before returning control to the handlers. |
| Services | `py/services/*.py` | Existing services and scanners that mutate caches, write metadata, move files, and broadcast WebSocket updates. |

## Handler responsibilities & contracts

`ModelHandlerSet` flattens the handler objects into the exact callables used by
the registrar. The table below highlights the separation of concerns within
the set and the invariants that must hold after each handler returns.

| Handler | Key endpoints | Collaborators | Contracts |
| --- | --- | --- | --- |
| `ModelPageView` | `/{prefix}` | `SettingsManager`, `server_i18n`, Jinja environment, `service.scanner` | Template is rendered with `is_initializing` flag when caches are cold; i18n filter is registered exactly once per environment instance. |
| `ModelListingHandler` | `/api/lm/{prefix}/list` | `service.get_paginated_data`, `service.format_response` | Listings respect pagination query parameters and cap `page_size` at 100; every item is formatted before response. |
| `ModelManagementHandler` | Mutations (delete, exclude, metadata, preview, tags, rename, bulk delete, duplicate verification) | `ModelLifecycleService`, `MetadataSyncService`, `PreviewAssetService`, `TagUpdateService`, scanner cache/index | Cache state mirrors filesystem changes: deletes prune cache & hash index, preview replacements synchronise metadata and cache NSFW levels, metadata saves trigger cache resort when names change. |
| `ModelQueryHandler` | Read-only queries (top tags, folders, duplicates, metadata, URLs) | Service query helpers & scanner cache | Outputs always wrapped in `{"success": True}` when no error; duplicate/filename grouping omits empty entries; invalid parameters (e.g. missing `model_root`) return HTTP 400. |
| `ModelDownloadHandler` | `/api/lm/download-model`, `/download-model-get`, `/download-progress/{id}`, `/cancel-download-get` | `DownloadModelUseCase`, `DownloadCoordinator`, `WebSocketManager` | Payload validation errors become HTTP 400 without mutating download progress cache; early-access failures surface as HTTP 401; successful downloads cache progress snapshots that back both WebSocket broadcasts and polling endpoints. |
| `ModelCivitaiHandler` | CivitAI metadata routes | `MetadataSyncService`, metadata provider factory, `BulkMetadataRefreshUseCase` | `fetch_all_civitai` streams progress via `WebSocketBroadcastCallback`; version lookups validate model type before returning; local availability fields derive from hash lookups without mutating cache state. |
| `ModelMoveHandler` | `move_model`, `move_models_bulk` | `ModelMoveService` | Moves execute atomically per request; bulk operations aggregate success/failure per file set. |
| `ModelAutoOrganizeHandler` | `/api/lm/{prefix}/auto-organize` (GET/POST), `/auto-organize-progress` | `AutoOrganizeUseCase`, `WebSocketProgressCallback`, `WebSocketManager` | Enforces single-flight execution using the shared lock; progress broadcasts remain available to polling clients until explicitly cleared; conflicts return HTTP 409 with a descriptive error. |
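
The `page_size` cap in the `ModelListingHandler` row amounts to a small clamp over the query parameters; a sketch under assumed parameter names and defaults:

```python
# Sketch of the pagination contract: page >= 1, page_size capped at 100.
# The query-parameter names and defaults are assumptions for illustration.
from typing import Mapping


def clamp_pagination(query: Mapping[str, str]) -> tuple[int, int]:
    page = max(1, int(query.get("page", "1")))
    page_size = min(100, max(1, int(query.get("page_size", "20"))))
    return page, page_size
```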

## Use case boundaries

Each use case exposes a narrow asynchronous API that hides the underlying
services. Their error mapping is essential for predictable HTTP responses.

| Use case | Entry point | Dependencies | Guarantees |
| --- | --- | --- | --- |
| `DownloadModelUseCase` | `execute(payload)` | `DownloadCoordinator.schedule_download` | Translates `ValueError` into `DownloadModelValidationError` for HTTP 400, recognises early-access errors (`"401"` in message) and surfaces them as `DownloadModelEarlyAccessError`, forwards success dictionaries untouched. |
| `AutoOrganizeUseCase` | `execute(file_paths, progress_callback)` | `ModelFileService.auto_organize_models`, `WebSocketManager` lock | Guarded by `ws_manager` lock + status checks; raises `AutoOrganizeInProgressError` before invoking the file service when another run is already active. |
| `BulkMetadataRefreshUseCase` | `execute_with_error_handling(progress_callback)` | `MetadataSyncService`, `SettingsManager`, `WebSocketBroadcastCallback` | Iterates through cached models, applies metadata sync, emits progress snapshots that handlers broadcast unchanged. |
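
The `DownloadModelUseCase` row describes a pure error-translation layer. A minimal sketch, assuming the exception classes are simple wrappers and that the coordinator raises `ValueError` for bad payloads:

```python
# Sketch of the error-mapping contract; constructor signatures are assumed.
class DownloadModelValidationError(Exception):
    """Mapped to HTTP 400 by ModelDownloadHandler."""


class DownloadModelEarlyAccessError(Exception):
    """Mapped to HTTP 401 by ModelDownloadHandler."""


class DownloadModelUseCase:
    def __init__(self, coordinator):
        self._coordinator = coordinator

    async def execute(self, payload: dict) -> dict:
        try:
            # Success dictionaries are forwarded untouched.
            return await self._coordinator.schedule_download(payload)
        except ValueError as exc:
            if "401" in str(exc):
                # Early-access models surface to clients as HTTP 401.
                raise DownloadModelEarlyAccessError(str(exc)) from exc
            raise DownloadModelValidationError(str(exc)) from exc
```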

## Maintaining legacy contracts

The refactor preserves the invariants called out in the previous architecture
notes. The most critical ones are reiterated here to emphasise the
collaboration points:

1. **Cache mutations** – Delete, exclude, rename, and bulk delete operations are channelled through `ModelManagementHandler`. The handler delegates to `ModelLifecycleService` or `MetadataSyncService`, and the scanner cache is mutated in-place before the handler returns. The accompanying tests assert that `scanner._cache.raw_data` and `scanner._hash_index` stay in sync after each mutation.
2. **Preview updates** – `PreviewAssetService.replace_preview` writes the new asset, `MetadataSyncService` persists the JSON metadata, and `scanner.update_preview_in_cache` mirrors the change. The handler returns the static URL produced by `config.get_preview_static_url`, keeping browser clients in lockstep with disk state.
3. **Download progress** – `DownloadCoordinator.schedule_download` generates the download identifier, registers a WebSocket progress callback, and caches the latest numeric progress via `WebSocketManager`. Both `download_model` responses and `/download-progress/{id}` polling read from the same cache to guarantee consistent progress reporting across transports (a sketch follows this list).
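
A sketch of the shared-progress invariant from item 3; the dict-backed cache and method names are assumptions, and only the "one cache, two transports" shape comes from the notes above.

```python
# Both the WebSocket broadcast path and /download-progress/{id} polling read
# from this one cache, so clients never observe diverging percentages.
from typing import Optional


class ProgressCache:  # stands in for the cache held by WebSocketManager
    def __init__(self) -> None:
        self._progress: dict[str, int] = {}

    def update(self, download_id: str, percent: int) -> None:
        # Invoked by the progress callback registered at schedule time.
        self._progress[download_id] = percent

    def snapshot(self, download_id: str) -> Optional[int]:
        # Read by the polling handler; broadcasts send the same value.
        return self._progress.get(download_id)
```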

## Extending the stack

To add a new shared route:

1. Declare it in `COMMON_ROUTE_DEFINITIONS` using a unique handler name.
2. Implement the corresponding coroutine on one of the handlers inside `ModelHandlerSet` (or introduce a new handler class when the concern does not fit existing ones).
3. Inject additional dependencies in `BaseModelRoutes._create_handler_set` by wiring services or use cases through the constructor parameters.

Model-specific routes should continue to be registered inside the subclass
implementation of `setup_specific_routes`, reusing the shared registrar where
possible.

docs/architecture/multi_library_design.md (new file)
@@ -0,0 +1,34 @@

# Multi-Library Management for Standalone Mode

## Requirements Summary

- **Independent libraries**: In standalone mode, users can maintain multiple libraries, where each library represents a distinct set of model folders (LoRAs, checkpoints, embeddings, etc.). Only one library is active at any given time, but users need a fast way to switch between them.
- **Library-specific settings**: The fields that vary per library are `folder_paths`, `default_lora_root`, `default_checkpoint_root`, and `default_embedding_root` inside `settings.json`.
- **Persistent caches**: Every library must have its own SQLite persistent model cache so that metadata generated for one library does not leak into another.
- **Backward compatibility**: Existing single-library setups should continue to work. When no multi-library configuration is provided, the application should behave exactly as before.

## Proposed Design

1. **Library registry**
   - Extend the standalone configuration to hold a list of libraries, each identified by a unique name.
   - Each entry stores the folder path configuration plus any library-scoped metadata (e.g. creation time, display name).
   - The active library key is stored separately to allow quick switching without rewriting the full config.
2. **Settings management**
   - Update `settings_manager` to load and persist the library registry. When a library is activated, hydrate the in-memory settings object with that library's folder configuration.
   - Provide helper methods for creating, renaming, and deleting libraries, ensuring validation for duplicate names and path collisions.
   - Continue writing the active library settings to `settings.json` for compatibility, while storing the registry in a new section such as `libraries` (see the sketch after this list).
3. **Persistent model cache**
   - Derive the SQLite file path from the active library, e.g. `model_cache_<library>.sqlite` or a nested directory structure like `model_cache/<library>/models.sqlite` (see the sketch after this list).
   - Update `PersistentModelCache` so it resolves the database path dynamically whenever the active library changes. Ensure connections are closed before switching to avoid locking issues.
   - Migrate existing single cache files by treating them as the default library's cache.
4. **Model scanning workflow**
   - Modify `ModelScanner` and related services to react to library switches by clearing in-memory caches, re-reading folder paths, and rehydrating metadata from the library-specific SQLite cache.
   - Provide API endpoints in standalone mode to list libraries, activate one, and trigger a rescan.
5. **UI/UX considerations**
   - In the standalone UI, introduce a library selector component that surfaces available libraries and offers quick switching.
   - Offer feedback when switching libraries (e.g. spinner while rescanning) and guard destructive actions with confirmation prompts.
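
To make the settings registry (point 2) and the per-library cache path (point 3) concrete, here is a hedged sketch; the key names and directory scheme are proposals from this design document, not shipped code.

```python
# Illustrative registry layout (shown as a Python dict mirroring settings.json)
# and a cache-path helper; everything here is a design sketch, not a real API.
from pathlib import Path

EXAMPLE_SETTINGS = {
    "active_library": "default",
    "libraries": {
        "default": {
            "display_name": "Default",
            "folder_paths": {"loras": ["/models/loras"]},
            "default_lora_root": "/models/loras",
            "default_checkpoint_root": "/models/checkpoints",
            "default_embedding_root": "/models/embeddings",
        }
    },
}


def cache_path_for(library: str, base_dir: Path) -> Path:
    # One SQLite file per library, using the nested layout proposed above.
    return base_dir / "model_cache" / library / "models.sqlite"
```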

## Implementation Notes

- **Data migration**: On startup, detect if the old `settings.json` structure is present. If so, create a default library entry using the current folder paths and point the active library to it.
- **Thread safety**: Ensure that any long-running scans are cancelled or awaited before switching libraries to prevent race conditions in cache writes.
- **Testing**: Add unit tests for the settings manager to cover library CRUD operations and cache path resolution. Include integration tests that simulate switching libraries and verify that the correct models are loaded.
- **Documentation**: Update user guides to explain how to define libraries, switch between them, and where the new cache files are stored.
- **Extensibility**: Keep the design open to future per-library settings (e.g. auto-refresh intervals, metadata overrides) by storing library data as objects instead of flat maps.

docs/architecture/recipe_routes.md (new file)
@@ -0,0 +1,89 @@

# Recipe route architecture

The recipe routing stack now mirrors the modular model route design. HTTP
bindings, controller wiring, handler orchestration, and business rules live in
separate layers so new behaviours can be added without re-threading the entire
feature. The diagram below outlines the flow for a typical request:

```mermaid
graph TD
    subgraph HTTP
        A[RecipeRouteRegistrar] -->|binds| B[RecipeRoutes controller]
    end
    subgraph Application
        B --> C[RecipeHandlerSet]
        C --> D1[Handlers]
        D1 --> E1[Use cases]
        E1 --> F1[Services / scanners]
    end
    subgraph Side Effects
        F1 --> G1[Cache & fingerprint index]
        F1 --> G2[Metadata files]
        F1 --> G3[Temporary shares]
    end
```

## Layer responsibilities

| Layer | Module(s) | Responsibility |
| --- | --- | --- |
| Registrar | `py/routes/recipe_route_registrar.py` | Declarative list of every recipe endpoint and helper methods that bind them to an `aiohttp` application. |
| Controller | `py/routes/base_recipe_routes.py`, `py/routes/recipe_routes.py` | Lazily resolves scanners/clients from the service registry, wires shared templates/i18n, instantiates `RecipeHandlerSet`, and exposes a `{handler_name: coroutine}` mapping for the registrar. |
| Handler set | `py/routes/handlers/recipe_handlers.py` | Thin HTTP adapters grouped by concern (page view, listings, queries, mutations, sharing). They normalise responses and translate service exceptions into HTTP status codes. |
| Services & scanners | `py/services/recipes/*.py`, `py/services/recipe_scanner.py`, `py/services/service_registry.py` | Concrete business logic: metadata parsing, persistence, sharing, fingerprint/index maintenance, and cache refresh. |

## Handler responsibilities & invariants

`RecipeHandlerSet` flattens purpose-built handler objects into the callables the
registrar binds. Each handler is responsible for a narrow concern and enforces a
set of invariants before returning:

| Handler | Key endpoints | Collaborators | Contracts |
| --- | --- | --- | --- |
| `RecipePageView` | `/loras/recipes` | `SettingsManager`, `server_i18n`, Jinja environment, recipe scanner getter | Template rendered with `is_initializing` flag when caches are still warming; i18n filter registered exactly once per environment instance. |
| `RecipeListingHandler` | `/api/lm/recipes`, `/api/lm/recipe/{id}` | `recipe_scanner.get_paginated_data`, `recipe_scanner.get_recipe_by_id` | Listings respect pagination and search filters; every item receives a `file_url` fallback even when metadata is incomplete; missing recipes become HTTP 404. |
| `RecipeQueryHandler` | Tag/base-model stats, syntax, LoRA lookups | Recipe scanner cache, `format_recipe_file_url` helper | Cache snapshots are reused without forcing refresh; duplicate lookups collapse groups by fingerprint; syntax lookups return helpful errors when LoRAs are absent. |
| `RecipeManagementHandler` | Save, update, reconnect, bulk delete, widget ingest | `RecipePersistenceService`, `RecipeAnalysisService`, recipe scanner | Persistence results propagate HTTP status codes; fingerprint/index updates flow through the scanner before returning; validation errors surface as HTTP 400 without touching disk. |
| `RecipeAnalysisHandler` | Uploaded/local/remote analysis | `RecipeAnalysisService`, `civitai_client`, recipe scanner | Unsupported content types map to HTTP 400; download errors (`RecipeDownloadError`) are not retried; every response includes a `loras` array for client compatibility. |
| `RecipeSharingHandler` | Share + download | `RecipeSharingService`, recipe scanner | Share responses provide a stable download URL and filename; expired shares surface as HTTP 404; downloads stream via `web.FileResponse` with attachment headers. |

## Use case boundaries

The dedicated services encapsulate long-running work so handlers stay thin.

| Use case | Entry point | Dependencies | Guarantees |
| --- | --- | --- | --- |
| `RecipeAnalysisService` | `analyze_uploaded_image`, `analyze_remote_image`, `analyze_local_image`, `analyze_widget_metadata` | `ExifUtils`, `RecipeParserFactory`, downloader factory, optional metadata collector/processor | Normalises missing/invalid payloads into `RecipeValidationError`; generates consistent fingerprint data to keep duplicate detection stable; temporary files are cleaned up after every analysis path. |
| `RecipePersistenceService` | `save_recipe`, `delete_recipe`, `update_recipe`, `reconnect_lora`, `bulk_delete`, `save_recipe_from_widget` | `ExifUtils`, recipe scanner, card preview sizing constants | Writes images/JSON metadata atomically; updates scanner caches and hash indices before returning; recalculates fingerprints whenever LoRA assignments change. |
| `RecipeSharingService` | `share_recipe`, `prepare_download` | `tempfile`, recipe scanner | Copies originals to TTL-managed temp files; metadata lookups re-use the scanner; expired shares trigger cleanup and `RecipeNotFoundError`. |
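
A minimal sketch of the TTL-managed share copy described in the `RecipeSharingService` row; the TTL value, the module-level store, and the function bodies are assumptions for illustration.

```python
# Sketch of share_recipe/prepare_download semantics: copy-on-share plus a TTL,
# with expired entries cleaned up and surfaced as RecipeNotFoundError.
import os
import shutil
import tempfile
import time
from pathlib import Path

SHARE_TTL_SECONDS = 15 * 60  # assumed TTL
_shares: dict[str, tuple[Path, float]] = {}


class RecipeNotFoundError(Exception):
    """Expired or unknown shares become HTTP 404 in the sharing handler."""


def share_recipe(recipe_id: str, original: Path) -> Path:
    # Copy the original so later edits/deletes cannot break the download link.
    fd, name = tempfile.mkstemp(suffix=original.suffix)
    os.close(fd)
    tmp = Path(name)
    shutil.copy2(original, tmp)
    _shares[recipe_id] = (tmp, time.monotonic() + SHARE_TTL_SECONDS)
    return tmp


def prepare_download(recipe_id: str) -> Path:
    entry = _shares.get(recipe_id)
    if entry is None or time.monotonic() > entry[1]:
        if entry is not None:
            entry[0].unlink(missing_ok=True)  # cleanup on expiry
            _shares.pop(recipe_id, None)
        raise RecipeNotFoundError(recipe_id)
    return entry[0]
```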

## Maintaining critical invariants

* **Cache updates** – Mutations (`save`, `delete`, `bulk_delete`, `update`) call back into the recipe scanner to mutate the in-memory cache and fingerprint index before returning a response. Tests assert that these methods are invoked even when stubbing persistence.
* **Fingerprint management** – `RecipePersistenceService` recomputes fingerprints whenever LoRA metadata changes, and duplicate lookups use those fingerprints to group recipes. Handlers bubble the resulting IDs so clients can merge duplicates without an extra fetch.
* **Metadata synchronisation** – Saving or reconnecting a recipe updates the JSON sidecar, refreshes embedded metadata via `ExifUtils`, and instructs the scanner to resort its cache. Sharing relies on this metadata to generate filenames and ensure downloads stay in sync with on-disk state.

## Extending the stack

1. Declare the new endpoint in `ROUTE_DEFINITIONS` with a unique handler name.
2. Implement the coroutine on an existing handler or introduce a new handler class inside `py/routes/handlers/recipe_handlers.py` when the concern does not fit existing ones.
3. Wire additional collaborators inside `BaseRecipeRoutes._create_handler_set` (inject new services or factories) and expose helper getters on the handler owner if the handler needs to share utilities.

Integration tests in `tests/routes/test_recipe_routes.py` exercise the listing,
mutation, analysis-error, and sharing paths end-to-end, ensuring the controller
and handler wiring remains valid as new capabilities are added.

docs/frontend-dom-fixtures.md (new file)
@@ -0,0 +1,51 @@

# Frontend DOM Fixture Strategy

This guide outlines how to reproduce the markup emitted by the Jinja templates while running Vitest in jsdom. The aim is to make it straightforward to write integration-style unit tests for managers and UI helpers without having to duplicate template fragments inline.

## Loading Template Markup

Vitest executes inside Node, so we can read the same HTML templates that ship with the extension:

1. Use the helper utilities from `tests/frontend/utils/domFixtures.js` to read files under the `templates/` directory.
2. Mount the returned markup into `document.body` (or any custom container) before importing the module under test so its query selectors resolve correctly.

```js
import { renderTemplate } from '../utils/domFixtures.js'; // adjust the relative path to your spec

beforeEach(() => {
  renderTemplate('loras.html', {
    dataset: { page: 'loras' }
  });
});
```

The helper ensures the dataset is applied to the container, which mirrors how the server sets `data-page` in production.

## Working with Partial Components

Many features are implemented as template partials located under `templates/components/`. When a test only needs a fragment (for example, the progress panel or context menu markup), load the component file directly:

```js
const container = renderTemplate('components/progress_panel.html');

const progressPanel = container.querySelector('#progress-panel');
```

This pattern avoids hand-written fixture strings and keeps the tests aligned with the actual markup.

## Resetting Between Tests

The shared Vitest setup clears `document.body` and storage APIs before each test. If a suite adds additional DOM nodes outside of the body or needs to reset custom attributes mid-test, use `resetDom()` exported from `domFixtures.js`.

```js
import { resetDom } from '../utils/domFixtures.js';

afterEach(() => {
  resetDom();
});
```

## Future Enhancements

- Provide typed helpers for injecting mock script tags (e.g., replicating ComfyUI globals).
- Compose higher-level fixtures that mimic specific pages (loras, checkpoints, recipes) once those managers receive dedicated suites.

docs/frontend-filtering-test-matrix.md (new file)
@@ -0,0 +1,44 @@

# LoRA & Checkpoints Filtering/Sorting Test Matrix

This matrix captures the scenarios that Phase 3 frontend tests should cover for the LoRA and Checkpoint managers. It focuses on how search, filter, sort, and duplicate badge toggles interact so future specs can share fixtures and expectations.

## Scope

- **Components**: `PageControls`, `FilterManager`, `SearchManager`, and `ModelDuplicatesManager` wiring invoked through `CheckpointsPageManager` and `LorasPageManager`.
- **Templates**: `templates/loras.html` and `templates/checkpoints.html` along with shared filter panel and toolbar partials.
- **APIs**: Requests issued through `baseModelApi.fetchModels` (via `resetAndReload`/`refreshModels`) and duplicates badge updates.

## Shared Setup Considerations

1. Render full page templates using `renderLorasPage` / `renderCheckpointsPage` helpers before importing modules so DOM queries resolve.
2. Stub storage helpers (`getStorageItem`, `setStorageItem`, `getSessionItem`, `setSessionItem`) to observe persistence behavior without mutating real storage.
3. Mock `sidebarManager` to capture refresh calls triggered after sort/filter actions.
4. Provide fake API implementations exposing `resetAndReload`, `refreshModels`, `fetchFromCivitai`, `toggleBulkMode`, and `clearCustomFilter` so control events remain asynchronous but deterministic.
5. Supply a minimal `ModelDuplicatesManager` mock exposing `toggleDuplicateMode`, `checkDuplicatesCount`, and `updateDuplicatesBadgeAfterRefresh` to validate duplicate badge wiring.

## Scenario Matrix

| ID | Feature | Scenario | LoRAs Expectations | Checkpoints Expectations | Notes |
| --- | --- | --- | --- | --- | --- |
| F-01 | Search filter | Typing a query updates `pageState.filters.search`, persists to session, and triggers `resetAndReload` on submit | Validate `SearchManager` writes query and reloads via API stub; confirm LoRA cards pass query downstream | Same as LoRAs | Cover `enter` press and clicking search icon |
| F-02 | Tag filter | Selecting a tag chip adds it to filters, applies active styling, and reloads results | Tag stored under `filters.tags`; `FilterManager.applyFilters` persists and triggers `resetAndReload(true)` | Same; ensure base model tag set is scoped to checkpoints dataset | Include removal path |
| F-03 | Base model filter | Toggling base model checkboxes updates `filters.baseModel`, persists, and reloads | Ensure only LoRA-supported models show; toggle multi-select | Ensure SDXL/Flux base models appear as expected | Capture UI state restored from storage on next init |
| F-04 | Favorites-only | Clicking favorites toggle updates session flag and calls `resetAndReload(true)` | Button gains `.active` class and API called | Same | Verify duplicates badge refresh when active |
| F-05 | Sort selection | Changing sort select saves preference (legacy + new format) and reloads | Confirm `PageControls.saveSortPreference` invoked with option and API called | Same with checkpoints-specific defaults | Cover `convertLegacySortFormat` branch |
| F-06 | Filter persistence | Re-initializing manager loads stored filters/sort and updates DOM | Filters pre-populate chips/checkboxes; favorites state restored | Same | Requires simulating repeated construction |
| F-07 | Combined filters | Applying search + tag + base model yields aggregated query params for fetch | Assert API receives merged filter payload | Same | Validate toast messaging for active filters |
| F-08 | Clearing filters | Using "Clear filters" resets state, storage, and reloads list | `FilterManager.clearFilters` empties `filters`, removes active class, shows toast | Same | Ensure favorites-only toggle unaffected |
| F-09 | Duplicate badge toggle | Pressing "Find duplicates" toggles duplicate mode and updates badge counts post-refresh | `ModelDuplicatesManager.toggleDuplicateMode` invoked and badge refresh called after API rebuild | Same plus checkpoint-specific duplicate badge dataset | Connects to future duplicate-specific specs |
| F-10 | Bulk actions menu | Opening bulk dropdown keeps filters intact and closes on outside click | Validate dropdown class toggling and no unintended reload | Same | Guard against regression when dropdown interacts with filters |

## Automation Coverage Status

- ✅ F-01 Search filter, F-02 Tag filter, F-03 Base model filter, F-04 Favorites-only toggle, F-05 Sort selection, and F-09 Duplicate badge toggle are covered by `tests/frontend/components/pageControls.filtering.test.js` for both LoRA and checkpoint pages.
- ⏳ F-06 Filter persistence, F-07 Combined filters, F-08 Clearing filters, and F-10 Bulk actions remain to be automated alongside upcoming bulk mode refinements.

## Coverage Gaps & Follow-Ups

- Write Vitest suites that exercise the matrix for both managers, sharing fixtures through page helpers to avoid duplication.
- Capture API parameter assertions by inspecting `baseModelApi.fetchModels` mocks rather than relying solely on state mutations.
- Add regression cases for legacy storage migrations (old filter keys) once fixtures exist for older payloads.
- Extend duplicate badge coverage with scenarios where `checkDuplicatesCount` signals zero duplicates versus pending calculations.

docs/frontend-testing-roadmap.md (new file)
@@ -0,0 +1,33 @@

# Frontend Automation Testing Roadmap

This roadmap tracks the planned rollout of automated testing for the ComfyUI LoRA Manager frontend. Each phase builds on the infrastructure introduced in this change set and records progress so future contributors can quickly identify the next tasks.

## Phase Overview

| Phase | Goal | Primary Focus | Status | Notes |
| --- | --- | --- | --- | --- |
| Phase 0 | Establish baseline tooling | Add Node test runner, jsdom environment, and seed smoke tests | ✅ Complete | Vitest + jsdom configured, example state tests committed |
| Phase 1 | Cover state management logic | Unit test selectors, derived data helpers, and storage utilities under `static/js/state` and `static/js/utils` | ✅ Complete | Storage helpers and state selectors now exercised via deterministic suites |
| Phase 2 | Test AppCore orchestration | Simulate page bootstrapping, infinite scroll hooks, and manager registration using JSDOM DOM fixtures | ✅ Complete | AppCore initialization + page feature suites now validate manager wiring, infinite scroll hooks, and onboarding gating |
| Phase 3 | Validate page-specific managers | Add focused suites for `loras`, `checkpoints`, `embeddings`, and `recipes` managers covering filtering, sorting, and bulk actions | ✅ Complete | LoRA/checkpoint suites expanded; embeddings + recipes managers now covered with initialization, filtering, and duplicate workflows |
| Phase 4 | Interaction-level regression tests | Exercise template fragments, modals, and menus to ensure UI wiring remains intact | ✅ Complete | Vitest DOM suites cover NSFW selector, recipe modal editing, and global context menus |
| Phase 5 | Continuous integration & coverage | Integrate frontend tests into CI workflow and track coverage metrics | ✅ Complete | CI workflow runs Vitest and aggregates V8 coverage into `coverage/frontend` via a dedicated script |

## Next Steps Checklist

- [x] Expand unit tests for `storageHelpers` covering migrations and namespace behavior.
- [x] Document DOM fixture strategy for reproducing template structures in tests.
- [x] Prototype AppCore initialization test that verifies manager bootstrapping with stubbed dependencies.
- [x] Add AppCore page feature suite exercising context menu creation and infinite scroll registration via DOM fixtures.
- [x] Extend AppCore orchestration tests to cover manager wiring, bulk menu setup, and onboarding gating scenarios.
- [x] Add interaction regression suites for context menus and recipe modals to complete Phase 4.
- [x] Evaluate integrating coverage reporting once test surface grows (> 20 specs).
- [x] Create shared fixtures for the loras and checkpoints pages once dedicated manager suites are added.
- [x] Draft focused test matrix for loras/checkpoints manager filtering and sorting paths ahead of Phase 3.
- [x] Implement LoRAs manager filtering/sorting specs for scenarios F-01–F-05 & F-09; queue remaining edge cases after duplicate/bulk flows stabilize.
- [x] Implement checkpoints manager filtering/sorting specs for scenarios F-01–F-05 & F-09; cover remaining paths alongside bulk action work.
- [x] Implement checkpoints page manager smoke tests covering initialization and duplicate badge wiring.
- [x] Outline focused checkpoints scenarios (filtering, sorting, duplicate badge toggles) to feed into the shared test matrix.
- [ ] Add duplicate badge regression coverage for zero/pending states after API refreshes.

Maintaining this roadmap alongside code changes will make it easier to append new automated test tasks and update their progress.

docs/library-switching.md (new file)
@@ -0,0 +1,28 @@

# Library Switching and Preview Routes

Library switching no longer requires restarting the backend. The preview
thumbnails shown in the UI are now served through a dynamic endpoint that
resolves files against the folders registered for the active library at request
time. This allows the multi-library flow to update model roots without touching
the aiohttp router, so previews remain available immediately after a switch.

## How the dynamic preview endpoint works

* `config.get_preview_static_url()` now returns `/api/lm/previews?path=<encoded>` for any preview path. The raw filesystem location is URL encoded so that it can be passed through the query string without leaking directory structure in the route itself.【F:py/config.py†L398-L404】
* `PreviewRoutes` exposes the `/api/lm/previews` handler which validates the decoded path against the directories registered for the current library. The request is rejected if it falls outside those roots or if the file does not exist (a sketch follows this list).【F:py/routes/preview_routes.py†L5-L21】【F:py/routes/handlers/preview_handlers.py†L9-L48】
* `Config` keeps an up-to-date cache of allowed preview roots. Every time a library is applied the cache is rebuilt using the declared LoRA, checkpoint and embedding directories (including symlink targets). The validation logic checks preview requests against this cache.【F:py/config.py†L51-L68】【F:py/config.py†L180-L248】【F:py/config.py†L332-L346】
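
A hedged sketch of the encode/validate round trip the bullets describe; only the `/api/lm/previews?path=<encoded>` shape comes from the notes above, while the helper bodies and the root-cache name are assumptions.

```python
# Sketch: generate a dynamic preview URL and validate the decoded path
# against the allowed roots rebuilt on every library switch.
from pathlib import Path
from urllib.parse import quote, unquote

ALLOWED_PREVIEW_ROOTS: list[Path] = []  # rebuilt when a library is applied


def get_preview_static_url(preview_path: str) -> str:
    # URL-encode the raw filesystem location into the query string.
    return f"/api/lm/previews?path={quote(preview_path, safe='')}"


def resolve_preview(encoded: str) -> Path:
    candidate = Path(unquote(encoded)).resolve()
    for root in ALLOWED_PREVIEW_ROOTS:
        if candidate.is_relative_to(root):  # Python 3.9+
            return candidate
    # Outside every registered root (or nonexistent): handler rejects it.
    raise PermissionError(encoded)
```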

Both the ComfyUI runtime (`LoraManager.add_routes`) and the standalone launcher
(`StandaloneLoraManager.add_routes`) register the new preview routes instead of
mounting a static directory per root. Switching libraries therefore works
without restarting the application, and preview URLs generated before or after a
switch continue to resolve correctly.【F:py/lora_manager.py†L21-L82】【F:standalone.py†L302-L315】

(German locale)

@@ -31,7 +31,8 @@
 "japanese": "日本語",
 "korean": "한국어",
 "french": "Français",
-"spanish": "Español"
+"spanish": "Español",
+"Hebrew": "עברית"
 },
 "fileSize": {
 "zero": "0 Bytes",
@@ -122,6 +123,20 @@
 "noRemoteImagesAvailable": "Keine Remote-Beispielbilder für dieses Modell auf Civitai verfügbar"
 }
 },
+"globalContextMenu": {
+"downloadExampleImages": {
+"label": "Beispielbilder herunterladen",
+"missingPath": "Bitte legen Sie einen Speicherort fest, bevor Sie Beispielbilder herunterladen.",
+"unavailable": "Beispielbild-Downloads sind noch nicht verfügbar. Versuchen Sie es erneut, nachdem die Seite vollständig geladen ist."
+},
+"cleanupExampleImages": {
+"label": "Beispielbild-Ordner bereinigen",
+"success": "{count} Ordner wurden in den Papierkorb verschoben",
+"none": "Keine Beispielbild-Ordner mussten bereinigt werden",
+"partial": "Bereinigung abgeschlossen, {failures} Ordner übersprungen",
+"error": "Fehler beim Bereinigen der Beispielbild-Ordner: {message}"
+}
+},
 "header": {
 "appTitle": "LoRA Manager",
 "navigation": {
@@ -173,6 +188,12 @@
 "civitaiApiKey": "Civitai API Key",
 "civitaiApiKeyPlaceholder": "Geben Sie Ihren Civitai API Key ein",
 "civitaiApiKeyHelp": "Wird für die Authentifizierung beim Herunterladen von Modellen von Civitai verwendet",
+"openSettingsFileLocation": {
+"label": "Einstellungsordner öffnen",
+"tooltip": "Den Ordner mit der settings.json öffnen",
+"success": "Einstellungsordner geöffnet",
+"failed": "Einstellungsordner konnte nicht geöffnet werden"
+},
 "sections": {
 "contentFiltering": "Inhaltsfilterung",
 "videoSettings": "Video-Einstellungen",
@@ -220,6 +241,10 @@
 }
 },
 "folderSettings": {
+"activeLibrary": "Aktive Bibliothek",
+"activeLibraryHelp": "Zwischen den konfigurierten Bibliotheken wechseln, um die Standardordner zu aktualisieren. Eine Änderung der Auswahl lädt die Seite neu.",
+"loadingLibraries": "Bibliotheken werden geladen...",
+"noLibraries": "Keine Bibliotheken konfiguriert",
 "defaultLoraRoot": "Standard-LoRA-Stammordner",
 "defaultLoraRootHelp": "Legen Sie den Standard-LoRA-Stammordner für Downloads, Importe und Verschiebungen fest",
 "defaultCheckpointRoot": "Standard-Checkpoint-Stammordner",
@@ -368,6 +393,7 @@
 "viewSelected": "Auswahl anzeigen",
 "addTags": "Allen Tags hinzufügen",
 "setBaseModel": "Basis-Modell für alle festlegen",
+"setContentRating": "Inhaltsbewertung für alle festlegen",
 "copyAll": "Alle Syntax kopieren",
 "refreshAll": "Alle Metadaten aktualisieren",
 "moveAll": "Alle in Ordner verschieben",
@@ -592,6 +618,7 @@
 "contentRating": {
 "title": "Inhaltsbewertung festlegen",
 "current": "Aktuell",
+"multiple": "Mehrere Werte",
 "levels": {
 "pg": "PG",
 "pg13": "PG13",
@@ -727,6 +754,7 @@
 "strengthMin": "Stärke Min",
 "strengthMax": "Stärke Max",
 "strength": "Stärke",
+"clipStrength": "Clip-Stärke",
 "clipSkip": "Clip Skip",
 "valuePlaceholder": "Wert",
 "add": "Hinzufügen"
@@ -1069,6 +1097,10 @@
 "bulkBaseModelUpdateSuccess": "Basis-Modell erfolgreich für {count} Modell(e) aktualisiert",
 "bulkBaseModelUpdatePartial": "{success} Modelle aktualisiert, {failed} fehlgeschlagen",
 "bulkBaseModelUpdateFailed": "Aktualisierung des Basis-Modells für ausgewählte Modelle fehlgeschlagen",
+"bulkContentRatingUpdating": "Inhaltsbewertung wird für {count} Modell(e) aktualisiert...",
+"bulkContentRatingSet": "Inhaltsbewertung auf {level} für {count} Modell(e) gesetzt",
+"bulkContentRatingPartial": "Inhaltsbewertung auf {level} für {success} Modell(e) gesetzt, {failed} fehlgeschlagen",
+"bulkContentRatingFailed": "Inhaltsbewertung für ausgewählte Modelle konnte nicht aktualisiert werden",
 "invalidCharactersRemoved": "Ungültige Zeichen aus Dateiname entfernt",
 "filenameCannotBeEmpty": "Dateiname darf nicht leer sein",
 "renameFailed": "Fehler beim Umbenennen der Datei: {message}",
@@ -1103,6 +1135,8 @@
 "compactModeToggled": "Kompakt-Modus {state}",
 "settingSaveFailed": "Fehler beim Speichern der Einstellung: {message}",
 "displayDensitySet": "Anzeige-Dichte auf {density} gesetzt",
+"libraryLoadFailed": "Failed to load libraries: {message}",
+"libraryActivateFailed": "Failed to activate library: {message}",
 "languageChangeFailed": "Fehler beim Ändern der Sprache: {message}",
 "cacheCleared": "Cache-Dateien wurden erfolgreich gelöscht. Cache wird bei der nächsten Aktion neu aufgebaut.",
 "cacheClearFailed": "Fehler beim Löschen des Caches: {error}",
@@ -1222,6 +1256,12 @@
 "refreshNow": "Jetzt aktualisieren",
 "refreshingIn": "Aktualisierung in",
 "seconds": "Sekunden"
+},
+"communitySupport": {
+"title": "Keep LoRA Manager Thriving with Your Support ❤️",
+"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
+"supportCta": "Support on Ko-fi",
+"learnMore": "LM Civitai Extension Tutorial"
 }
 }
 }

(English locale)

@@ -31,7 +31,8 @@
 "japanese": "日本語",
 "korean": "한국어",
 "french": "Français",
-"spanish": "Español"
+"spanish": "Español",
+"Hebrew": "עברית"
 },
 "fileSize": {
 "zero": "0 Bytes",
@@ -122,6 +123,20 @@
 "noRemoteImagesAvailable": "No remote example images available for this model on Civitai"
 }
 },
+"globalContextMenu": {
+"downloadExampleImages": {
+"label": "Download example images",
+"missingPath": "Set a download location before downloading example images.",
+"unavailable": "Example image downloads aren't available yet. Try again after the page finishes loading."
+},
+"cleanupExampleImages": {
+"label": "Clean up example image folders",
+"success": "Moved {count} folder(s) to the deleted folder",
+"none": "No example image folders needed cleanup",
+"partial": "Cleanup completed with {failures} folder(s) skipped",
+"error": "Failed to clean example image folders: {message}"
+}
+},
 "header": {
 "appTitle": "LoRA Manager",
 "navigation": {
@@ -173,6 +188,12 @@
 "civitaiApiKey": "Civitai API Key",
 "civitaiApiKeyPlaceholder": "Enter your Civitai API key",
 "civitaiApiKeyHelp": "Used for authentication when downloading models from Civitai",
+"openSettingsFileLocation": {
+"label": "Open settings folder",
+"tooltip": "Open the folder containing settings.json",
+"success": "Opened settings.json folder",
+"failed": "Failed to open settings.json folder"
+},
 "sections": {
 "contentFiltering": "Content Filtering",
 "videoSettings": "Video Settings",
@@ -220,6 +241,10 @@
 }
 },
 "folderSettings": {
+"activeLibrary": "Active Library",
+"activeLibraryHelp": "Switch between configured libraries to update default folders. Changing the selection reloads the page.",
+"loadingLibraries": "Loading libraries...",
+"noLibraries": "No libraries configured",
 "defaultLoraRoot": "Default LoRA Root",
 "defaultLoraRootHelp": "Set the default LoRA root directory for downloads, imports and moves",
 "defaultCheckpointRoot": "Default Checkpoint Root",
@@ -368,6 +393,7 @@
 "viewSelected": "View Selected",
 "addTags": "Add Tags to All",
 "setBaseModel": "Set Base Model for All",
+"setContentRating": "Set Content Rating for All",
 "copyAll": "Copy All Syntax",
 "refreshAll": "Refresh All Metadata",
 "moveAll": "Move All to Folder",
@@ -592,6 +618,7 @@
 "contentRating": {
 "title": "Set Content Rating",
 "current": "Current",
+"multiple": "Multiple values",
 "levels": {
 "pg": "PG",
 "pg13": "PG13",
@@ -727,6 +754,7 @@
 "strengthMin": "Strength Min",
 "strengthMax": "Strength Max",
 "strength": "Strength",
+"clipStrength": "Clip Strength",
 "clipSkip": "Clip Skip",
 "valuePlaceholder": "Value",
 "add": "Add"
@@ -1069,6 +1097,10 @@
 "bulkBaseModelUpdateSuccess": "Successfully updated base model for {count} model(s)",
 "bulkBaseModelUpdatePartial": "Updated {success} model(s), failed {failed} model(s)",
 "bulkBaseModelUpdateFailed": "Failed to update base model for selected models",
+"bulkContentRatingUpdating": "Updating content rating for {count} model(s)...",
+"bulkContentRatingSet": "Set content rating to {level} for {count} model(s)",
+"bulkContentRatingPartial": "Set content rating to {level} for {success} model(s), {failed} failed",
+"bulkContentRatingFailed": "Failed to update content rating for selected models",
 "invalidCharactersRemoved": "Invalid characters removed from filename",
 "filenameCannotBeEmpty": "File name cannot be empty",
 "renameFailed": "Failed to rename file: {message}",
@@ -1103,6 +1135,8 @@
 "compactModeToggled": "Compact Mode {state}",
 "settingSaveFailed": "Failed to save setting: {message}",
 "displayDensitySet": "Display Density set to {density}",
+"libraryLoadFailed": "Failed to load libraries: {message}",
+"libraryActivateFailed": "Failed to activate library: {message}",
 "languageChangeFailed": "Failed to change language: {message}",
 "cacheCleared": "Cache files have been cleared successfully. Cache will rebuild on next action.",
 "cacheClearFailed": "Failed to clear cache: {error}",
@@ -1222,6 +1256,12 @@
 "refreshNow": "Refresh Now",
 "refreshingIn": "Refreshing in",
 "seconds": "seconds"
+},
+"communitySupport": {
+"title": "Keep LoRA Manager Thriving with Your Support ❤️",
+"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
+"supportCta": "Support on Ko-fi",
+"learnMore": "LM Civitai Extension Tutorial"
 }
 }
 }

(Spanish locale)

@@ -31,7 +31,8 @@
 "japanese": "日本語",
 "korean": "한국어",
 "french": "Français",
-"spanish": "Español"
+"spanish": "Español",
+"Hebrew": "עברית"
 },
 "fileSize": {
 "zero": "0 Bytes",
@@ -122,6 +123,20 @@
 "noRemoteImagesAvailable": "No hay imágenes de ejemplo remotas disponibles para este modelo en Civitai"
 }
 },
+"globalContextMenu": {
+"downloadExampleImages": {
+"label": "Descargar imágenes de ejemplo",
+"missingPath": "Establece una ubicación de descarga antes de descargar imágenes de ejemplo.",
+"unavailable": "Las descargas de imágenes de ejemplo aún no están disponibles. Intenta de nuevo después de que la página termine de cargar."
+},
+"cleanupExampleImages": {
+"label": "Limpiar carpetas de imágenes de ejemplo",
+"success": "Se movieron {count} carpeta(s) a la carpeta de eliminados",
+"none": "No hay carpetas de imágenes de ejemplo que necesiten limpieza",
+"partial": "Limpieza completada con {failures} carpeta(s) omitidas",
+"error": "No se pudieron limpiar las carpetas de imágenes de ejemplo: {message}"
+}
+},
 "header": {
 "appTitle": "LoRA Manager",
 "navigation": {
@@ -173,6 +188,12 @@
 "civitaiApiKey": "Clave API de Civitai",
 "civitaiApiKeyPlaceholder": "Introduce tu clave API de Civitai",
 "civitaiApiKeyHelp": "Utilizada para autenticación al descargar modelos de Civitai",
+"openSettingsFileLocation": {
+"label": "Abrir carpeta de ajustes",
+"tooltip": "Abrir la carpeta que contiene settings.json",
+"success": "Carpeta de settings.json abierta",
+"failed": "No se pudo abrir la carpeta de settings.json"
+},
 "sections": {
 "contentFiltering": "Filtrado de contenido",
 "videoSettings": "Configuración de video",
@@ -220,6 +241,10 @@
 }
 },
 "folderSettings": {
+"activeLibrary": "Biblioteca activa",
+"activeLibraryHelp": "Alterna entre las bibliotecas configuradas para actualizar las carpetas predeterminadas. Cambiar la selección recarga la página.",
+"loadingLibraries": "Cargando bibliotecas...",
+"noLibraries": "No hay bibliotecas configuradas",
 "defaultLoraRoot": "Raíz predeterminada de LoRA",
 "defaultLoraRootHelp": "Establecer el directorio raíz predeterminado de LoRA para descargas, importaciones y movimientos",
 "defaultCheckpointRoot": "Raíz predeterminada de checkpoint",
@@ -368,6 +393,7 @@
 "viewSelected": "Ver seleccionados",
 "addTags": "Añadir etiquetas a todos",
 "setBaseModel": "Establecer modelo base para todos",
+"setContentRating": "Establecer clasificación de contenido para todos",
 "copyAll": "Copiar toda la sintaxis",
 "refreshAll": "Actualizar todos los metadatos",
 "moveAll": "Mover todos a carpeta",
@@ -592,6 +618,7 @@
 "contentRating": {
 "title": "Establecer clasificación de contenido",
 "current": "Actual",
+"multiple": "Valores múltiples",
 "levels": {
 "pg": "PG",
 "pg13": "PG13",
@@ -727,6 +754,7 @@
 "strengthMin": "Fuerza mínima",
 "strengthMax": "Fuerza máxima",
 "strength": "Fuerza",
+"clipStrength": "Fuerza de Clip",
 "clipSkip": "Clip Skip",
 "valuePlaceholder": "Valor",
 "add": "Añadir"
@@ -1069,6 +1097,10 @@
 "bulkBaseModelUpdateSuccess": "Modelo base actualizado exitosamente para {count} modelo(s)",
 "bulkBaseModelUpdatePartial": "Actualizados {success} modelo(s), fallaron {failed} modelo(s)",
 "bulkBaseModelUpdateFailed": "Error al actualizar el modelo base para los modelos seleccionados",
+"bulkContentRatingUpdating": "Actualizando la clasificación de contenido para {count} modelo(s)...",
+"bulkContentRatingSet": "Clasificación de contenido establecida en {level} para {count} modelo(s)",
+"bulkContentRatingPartial": "Clasificación de contenido establecida en {level} para {success} modelo(s), {failed} fallaron",
+"bulkContentRatingFailed": "No se pudo actualizar la clasificación de contenido para los modelos seleccionados",
 "invalidCharactersRemoved": "Caracteres inválidos eliminados del nombre de archivo",
 "filenameCannotBeEmpty": "El nombre de archivo no puede estar vacío",
 "renameFailed": "Error al renombrar archivo: {message}",
@@ -1103,6 +1135,8 @@
 "compactModeToggled": "Modo compacto {state}",
 "settingSaveFailed": "Error al guardar configuración: {message}",
 "displayDensitySet": "Densidad de visualización establecida a {density}",
+"libraryLoadFailed": "Failed to load libraries: {message}",
+"libraryActivateFailed": "Failed to activate library: {message}",
 "languageChangeFailed": "Error al cambiar idioma: {message}",
 "cacheCleared": "Archivos de caché limpiados exitosamente. La caché se reconstruirá en la próxima acción.",
 "cacheClearFailed": "Error al limpiar caché: {error}",
|
"cacheClearFailed": "Error al limpiar caché: {error}",
|
||||||
@@ -1222,6 +1256,12 @@
|
|||||||
"refreshNow": "Actualizar ahora",
|
"refreshNow": "Actualizar ahora",
|
||||||
"refreshingIn": "Actualizando en",
|
"refreshingIn": "Actualizando en",
|
||||||
"seconds": "segundos"
|
"seconds": "segundos"
|
||||||
|
},
|
||||||
|
"communitySupport": {
|
||||||
|
"title": "Keep LoRA Manager Thriving with Your Support ❤️",
|
||||||
|
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
||||||
|
"supportCta": "Support on Ko-fi",
|
||||||
|
"learnMore": "LM Civitai Extension Tutorial"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
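All of the locale values above share one interpolation convention: runtime parameters appear as brace placeholders such as {count}, {failures}, {level} and {message}. The substitution itself happens in the manager's JavaScript frontend, which is not part of this diff, so the following Python sketch only illustrates the convention; str.format-style replacement is an assumption here, not the project's actual i18n helper.

    # Minimal sketch of brace-placeholder interpolation, assuming
    # str.format-style substitution; the real frontend lookup code is
    # not shown in this diff and may differ.
    messages = {
        "cleanupExampleImages.success": "Se movieron {count} carpeta(s) a la carpeta de eliminados",
        "cleanupExampleImages.error": "No se pudieron limpiar las carpetas de imágenes de ejemplo: {message}",
    }

    def translate(key: str, **params: object) -> str:
        """Look up a locale string and fill in its {placeholders}."""
        return messages[key].format(**params)

    print(translate("cleanupExampleImages.success", count=3))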
locales/fr.json
@@ -31,7 +31,8 @@
 "japanese": "日本語",
 "korean": "한국어",
 "french": "Français",
-"spanish": "Español"
+"spanish": "Español",
+"Hebrew": "עברית"
 },
 "fileSize": {
 "zero": "0 Octets",
@@ -122,6 +123,20 @@
 "noRemoteImagesAvailable": "Aucune image d'exemple distante disponible pour ce modèle sur Civitai"
 }
 },
+"globalContextMenu": {
+"downloadExampleImages": {
+"label": "Télécharger les images d'exemple",
+"missingPath": "Définissez un emplacement de téléchargement avant de télécharger les images d'exemple.",
+"unavailable": "Le téléchargement des images d'exemple n'est pas encore disponible. Réessayez après le chargement complet de la page."
+},
+"cleanupExampleImages": {
+"label": "Nettoyer les dossiers d'images d'exemple",
+"success": "{count} dossier(s) déplacé(s) vers le dossier supprimé",
+"none": "Aucun dossier d'images d'exemple à nettoyer",
+"partial": "Nettoyage terminé avec {failures} dossier(s) ignoré(s)",
+"error": "Échec du nettoyage des dossiers d'images d'exemple : {message}"
+}
+},
 "header": {
 "appTitle": "LoRA Manager",
 "navigation": {
@@ -173,6 +188,12 @@
 "civitaiApiKey": "Clé API Civitai",
 "civitaiApiKeyPlaceholder": "Entrez votre clé API Civitai",
 "civitaiApiKeyHelp": "Utilisée pour l'authentification lors du téléchargement de modèles depuis Civitai",
+"openSettingsFileLocation": {
+"label": "Ouvrir le dossier des paramètres",
+"tooltip": "Ouvrir le dossier contenant settings.json",
+"success": "Dossier settings.json ouvert",
+"failed": "Impossible d'ouvrir le dossier settings.json"
+},
 "sections": {
 "contentFiltering": "Filtrage du contenu",
 "videoSettings": "Paramètres vidéo",
@@ -220,6 +241,10 @@
 }
 },
 "folderSettings": {
+"activeLibrary": "Bibliothèque active",
+"activeLibraryHelp": "Basculer entre les bibliothèques configurées pour mettre à jour les dossiers par défaut. Changer la sélection recharge la page.",
+"loadingLibraries": "Chargement des bibliothèques...",
+"noLibraries": "Aucune bibliothèque configurée",
 "defaultLoraRoot": "Racine LoRA par défaut",
 "defaultLoraRootHelp": "Définir le répertoire racine LoRA par défaut pour les téléchargements, imports et déplacements",
 "defaultCheckpointRoot": "Racine Checkpoint par défaut",
@@ -368,6 +393,7 @@
 "viewSelected": "Voir la sélection",
 "addTags": "Ajouter des tags à tous",
 "setBaseModel": "Définir le modèle de base pour tous",
+"setContentRating": "Définir la classification du contenu pour tous",
 "copyAll": "Copier toute la syntaxe",
 "refreshAll": "Actualiser toutes les métadonnées",
 "moveAll": "Déplacer tout vers un dossier",
@@ -592,6 +618,7 @@
 "contentRating": {
 "title": "Définir la classification du contenu",
 "current": "Actuel",
+"multiple": "Valeurs multiples",
 "levels": {
 "pg": "PG",
 "pg13": "PG13",
@@ -727,6 +754,7 @@
 "strengthMin": "Force Min",
 "strengthMax": "Force Max",
 "strength": "Force",
+"clipStrength": "Force Clip",
 "clipSkip": "Clip Skip",
 "valuePlaceholder": "Valeur",
 "add": "Ajouter"
@@ -1069,6 +1097,10 @@
 "bulkBaseModelUpdateSuccess": "Modèle de base mis à jour avec succès pour {count} modèle(s)",
 "bulkBaseModelUpdatePartial": "{success} modèle(s) mis à jour, {failed} modèle(s) en échec",
 "bulkBaseModelUpdateFailed": "Échec de la mise à jour du modèle de base pour les modèles sélectionnés",
+"bulkContentRatingUpdating": "Mise à jour de la classification du contenu pour {count} modèle(s)...",
+"bulkContentRatingSet": "Classification du contenu définie sur {level} pour {count} modèle(s)",
+"bulkContentRatingPartial": "Classification du contenu définie sur {level} pour {success} modèle(s), {failed} échec(s)",
+"bulkContentRatingFailed": "Impossible de mettre à jour la classification du contenu pour les modèles sélectionnés",
 "invalidCharactersRemoved": "Caractères invalides supprimés du nom de fichier",
 "filenameCannotBeEmpty": "Le nom de fichier ne peut pas être vide",
 "renameFailed": "Échec du renommage du fichier : {message}",
@@ -1103,6 +1135,8 @@
 "compactModeToggled": "Mode compact {state}",
 "settingSaveFailed": "Échec de la sauvegarde du paramètre : {message}",
 "displayDensitySet": "Densité d'affichage définie sur {density}",
+"libraryLoadFailed": "Failed to load libraries: {message}",
+"libraryActivateFailed": "Failed to activate library: {message}",
 "languageChangeFailed": "Échec du changement de langue : {message}",
 "cacheCleared": "Les fichiers de cache ont été vidés avec succès. Le cache sera reconstruit à la prochaine action.",
 "cacheClearFailed": "Échec du vidage du cache : {error}",
@@ -1222,6 +1256,12 @@
 "refreshNow": "Actualiser maintenant",
 "refreshingIn": "Actualisation dans",
 "seconds": "secondes"
+},
+"communitySupport": {
+"title": "Keep LoRA Manager Thriving with Your Support ❤️",
+"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
+"supportCta": "Support on Ko-fi",
+"learnMore": "LM Civitai Extension Tutorial"
 }
 }
 }
locales/he.json (new file, 1267 lines)
File diff suppressed because it is too large
locales/ja.json
@@ -31,7 +31,8 @@
 "japanese": "日本語",
 "korean": "한국어",
 "french": "Français",
-"spanish": "Español"
+"spanish": "Español",
+"Hebrew": "עברית"
 },
 "fileSize": {
 "zero": "0バイト",
@@ -122,6 +123,20 @@
 "noRemoteImagesAvailable": "このモデルのCivitaiでのリモート例画像は利用できません"
 }
 },
+"globalContextMenu": {
+"downloadExampleImages": {
+"label": "例画像をダウンロード",
+"missingPath": "例画像をダウンロードする前にダウンロード場所を設定してください。",
+"unavailable": "例画像のダウンロードはまだ利用できません。ページの読み込みが完了してから再度お試しください。"
+},
+"cleanupExampleImages": {
+"label": "例画像フォルダをクリーンアップ",
+"success": "{count} 個のフォルダを削除フォルダに移動しました",
+"none": "クリーンアップが必要な例画像フォルダはありません",
+"partial": "クリーンアップが完了しましたが、{failures} 個のフォルダはスキップされました",
+"error": "例画像フォルダのクリーンアップに失敗しました:{message}"
+}
+},
 "header": {
 "appTitle": "LoRA Manager",
 "navigation": {
@@ -173,6 +188,12 @@
 "civitaiApiKey": "Civitai APIキー",
 "civitaiApiKeyPlaceholder": "Civitai APIキーを入力してください",
 "civitaiApiKeyHelp": "Civitaiからモデルをダウンロードするときの認証に使用されます",
+"openSettingsFileLocation": {
+"label": "設定フォルダーを開く",
+"tooltip": "settings.json を含むフォルダーを開きます",
+"success": "settings.json フォルダーを開きました",
+"failed": "settings.json フォルダーを開けませんでした"
+},
 "sections": {
 "contentFiltering": "コンテンツフィルタリング",
 "videoSettings": "動画設定",
@@ -220,6 +241,10 @@
 }
 },
 "folderSettings": {
+"activeLibrary": "アクティブライブラリ",
+"activeLibraryHelp": "設定済みのライブラリを切り替えてデフォルトのフォルダを更新します。選択を変更するとページが再読み込みされます。",
+"loadingLibraries": "ライブラリを読み込み中...",
+"noLibraries": "ライブラリが設定されていません",
 "defaultLoraRoot": "デフォルトLoRAルート",
 "defaultLoraRootHelp": "ダウンロード、インポート、移動用のデフォルトLoRAルートディレクトリを設定",
 "defaultCheckpointRoot": "デフォルトCheckpointルート",
@@ -368,6 +393,7 @@
 "viewSelected": "選択中を表示",
 "addTags": "すべてにタグを追加",
 "setBaseModel": "すべてにベースモデルを設定",
+"setContentRating": "すべてのモデルのコンテンツレーティングを設定",
 "copyAll": "すべての構文をコピー",
 "refreshAll": "すべてのメタデータを更新",
 "moveAll": "すべてをフォルダに移動",
@@ -592,6 +618,7 @@
 "contentRating": {
 "title": "コンテンツレーティングを設定",
 "current": "現在",
+"multiple": "複数の値",
 "levels": {
 "pg": "PG",
 "pg13": "PG13",
@@ -727,6 +754,7 @@
 "strengthMin": "強度最小",
 "strengthMax": "強度最大",
 "strength": "強度",
+"clipStrength": "クリップ強度",
 "clipSkip": "Clip Skip",
 "valuePlaceholder": "値",
 "add": "追加"
@@ -1069,6 +1097,10 @@
 "bulkBaseModelUpdateSuccess": "{count} モデルのベースモデルが正常に更新されました",
 "bulkBaseModelUpdatePartial": "{success} モデルを更新、{failed} モデルは失敗しました",
 "bulkBaseModelUpdateFailed": "選択したモデルのベースモデルの更新に失敗しました",
+"bulkContentRatingUpdating": "{count} 件のモデルのコンテンツレーティングを更新中...",
+"bulkContentRatingSet": "{count} 件のモデルのコンテンツレーティングを {level} に設定しました",
+"bulkContentRatingPartial": "{success} 件のモデルのコンテンツレーティングを {level} に設定、{failed} 件は失敗しました",
+"bulkContentRatingFailed": "選択したモデルのコンテンツレーティングを更新できませんでした",
 "invalidCharactersRemoved": "ファイル名から無効な文字が削除されました",
 "filenameCannotBeEmpty": "ファイル名を空にすることはできません",
 "renameFailed": "ファイル名の変更に失敗しました:{message}",
@@ -1103,6 +1135,8 @@
 "compactModeToggled": "コンパクトモード {state}",
 "settingSaveFailed": "設定の保存に失敗しました:{message}",
 "displayDensitySet": "表示密度が {density} に設定されました",
+"libraryLoadFailed": "Failed to load libraries: {message}",
+"libraryActivateFailed": "Failed to activate library: {message}",
 "languageChangeFailed": "言語の変更に失敗しました:{message}",
 "cacheCleared": "キャッシュファイルが正常にクリアされました。次回のアクションでキャッシュが再構築されます。",
 "cacheClearFailed": "キャッシュのクリアに失敗しました:{error}",
@@ -1222,6 +1256,12 @@
 "refreshNow": "今すぐ更新",
 "refreshingIn": "更新まで",
 "seconds": "秒"
+},
+"communitySupport": {
+"title": "Keep LoRA Manager Thriving with Your Support ❤️",
+"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
+"supportCta": "Support on Ko-fi",
+"learnMore": "LM Civitai Extension Tutorial"
 }
 }
 }
locales/ko.json
@@ -31,7 +31,8 @@
 "japanese": "日本語",
 "korean": "한국어",
 "french": "Français",
-"spanish": "Español"
+"spanish": "Español",
+"Hebrew": "עברית"
 },
 "fileSize": {
 "zero": "0 바이트",
@@ -122,6 +123,20 @@
 "noRemoteImagesAvailable": "Civitai에서 이 모델의 원격 예시 이미지를 사용할 수 없습니다"
 }
 },
+"globalContextMenu": {
+"downloadExampleImages": {
+"label": "예시 이미지 다운로드",
+"missingPath": "예시 이미지를 다운로드하기 전에 다운로드 위치를 설정하세요.",
+"unavailable": "예시 이미지 다운로드는 아직 사용할 수 없습니다. 페이지 로딩이 완료된 후 다시 시도하세요."
+},
+"cleanupExampleImages": {
+"label": "예시 이미지 폴더 정리",
+"success": "{count}개의 폴더가 삭제 폴더로 이동되었습니다",
+"none": "정리가 필요한 예시 이미지 폴더가 없습니다",
+"partial": "정리가 완료되었으나 {failures}개의 폴더가 건너뛰어졌습니다",
+"error": "예시 이미지 폴더 정리에 실패했습니다: {message}"
+}
+},
 "header": {
 "appTitle": "LoRA Manager",
 "navigation": {
@@ -173,6 +188,12 @@
 "civitaiApiKey": "Civitai API 키",
 "civitaiApiKeyPlaceholder": "Civitai API 키를 입력하세요",
 "civitaiApiKeyHelp": "Civitai에서 모델을 다운로드할 때 인증에 사용됩니다",
+"openSettingsFileLocation": {
+"label": "설정 폴더 열기",
+"tooltip": "settings.json이 있는 폴더를 엽니다",
+"success": "settings.json 폴더를 열었습니다",
+"failed": "settings.json 폴더를 열지 못했습니다"
+},
 "sections": {
 "contentFiltering": "콘텐츠 필터링",
 "videoSettings": "비디오 설정",
@@ -220,6 +241,10 @@
 }
 },
 "folderSettings": {
+"activeLibrary": "활성 라이브러리",
+"activeLibraryHelp": "구성된 라이브러리를 전환하여 기본 폴더를 업데이트합니다. 선택을 변경하면 페이지가 다시 로드됩니다.",
+"loadingLibraries": "라이브러리를 불러오는 중...",
+"noLibraries": "구성된 라이브러리가 없습니다",
 "defaultLoraRoot": "기본 LoRA 루트",
 "defaultLoraRootHelp": "다운로드, 가져오기 및 이동을 위한 기본 LoRA 루트 디렉토리를 설정합니다",
 "defaultCheckpointRoot": "기본 Checkpoint 루트",
@@ -368,6 +393,7 @@
 "viewSelected": "선택 항목 보기",
 "addTags": "모두에 태그 추가",
 "setBaseModel": "모두에 베이스 모델 설정",
+"setContentRating": "모든 모델에 콘텐츠 등급 설정",
 "copyAll": "모든 문법 복사",
 "refreshAll": "모든 메타데이터 새로고침",
 "moveAll": "모두 폴더로 이동",
@@ -592,6 +618,7 @@
 "contentRating": {
 "title": "콘텐츠 등급 설정",
 "current": "현재",
+"multiple": "여러 값",
 "levels": {
 "pg": "PG",
 "pg13": "PG13",
@@ -727,6 +754,7 @@
 "strengthMin": "최소 강도",
 "strengthMax": "최대 강도",
 "strength": "강도",
+"clipStrength": "클립 강도",
 "clipSkip": "클립 스킵",
 "valuePlaceholder": "값",
 "add": "추가"
@@ -1069,6 +1097,10 @@
 "bulkBaseModelUpdateSuccess": "{count}개의 모델에 베이스 모델이 성공적으로 업데이트되었습니다",
 "bulkBaseModelUpdatePartial": "{success}개의 모델이 업데이트되었고, {failed}개의 모델이 실패했습니다",
 "bulkBaseModelUpdateFailed": "선택한 모델의 베이스 모델 업데이트에 실패했습니다",
+"bulkContentRatingUpdating": "{count}개 모델의 콘텐츠 등급을 업데이트하는 중...",
+"bulkContentRatingSet": "{count}개 모델의 콘텐츠 등급을 {level}(으)로 설정했습니다",
+"bulkContentRatingPartial": "{success}개 모델의 콘텐츠 등급을 {level}(으)로 설정했고, {failed}개는 실패했습니다",
+"bulkContentRatingFailed": "선택한 모델의 콘텐츠 등급을 업데이트하지 못했습니다",
 "invalidCharactersRemoved": "파일명에서 잘못된 문자가 제거되었습니다",
 "filenameCannotBeEmpty": "파일 이름은 비어있을 수 없습니다",
 "renameFailed": "파일 이름 변경 실패: {message}",
@@ -1103,6 +1135,8 @@
 "compactModeToggled": "컴팩트 모드 {state}",
 "settingSaveFailed": "설정 저장 실패: {message}",
 "displayDensitySet": "표시 밀도가 {density}로 설정되었습니다",
+"libraryLoadFailed": "Failed to load libraries: {message}",
+"libraryActivateFailed": "Failed to activate library: {message}",
 "languageChangeFailed": "언어 변경 실패: {message}",
 "cacheCleared": "캐시 파일이 성공적으로 지워졌습니다. 다음 작업 시 캐시가 재구축됩니다.",
 "cacheClearFailed": "캐시 지우기 실패: {error}",
@@ -1222,6 +1256,12 @@
 "refreshNow": "지금 새로고침",
 "refreshingIn": "새로고침까지",
 "seconds": "초"
+},
+"communitySupport": {
+"title": "Keep LoRA Manager Thriving with Your Support ❤️",
+"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
+"supportCta": "Support on Ko-fi",
+"learnMore": "LM Civitai Extension Tutorial"
 }
 }
 }
locales/ru.json
@@ -31,7 +31,8 @@
 "japanese": "日本語",
 "korean": "한국어",
 "french": "Français",
-"spanish": "Español"
+"spanish": "Español",
+"Hebrew": "עברית"
 },
 "fileSize": {
 "zero": "0 Байт",
@@ -122,6 +123,20 @@
 "noRemoteImagesAvailable": "Нет удаленных примеров изображений для этой модели на Civitai"
 }
 },
+"globalContextMenu": {
+"downloadExampleImages": {
+"label": "Загрузить примеры изображений",
+"missingPath": "Укажите место загрузки перед загрузкой примеров изображений.",
+"unavailable": "Загрузка примеров изображений пока недоступна. Попробуйте снова после полной загрузки страницы."
+},
+"cleanupExampleImages": {
+"label": "Очистить папки с примерами изображений",
+"success": "Перемещено {count} папок в папку удалённых",
+"none": "Нет папок с примерами изображений, требующих очистки",
+"partial": "Очистка завершена, пропущено {failures} папок",
+"error": "Не удалось очистить папки с примерами изображений: {message}"
+}
+},
 "header": {
 "appTitle": "LoRA Manager",
 "navigation": {
@@ -173,6 +188,12 @@
 "civitaiApiKey": "Ключ API Civitai",
 "civitaiApiKeyPlaceholder": "Введите ваш ключ API Civitai",
 "civitaiApiKeyHelp": "Используется для аутентификации при загрузке моделей с Civitai",
+"openSettingsFileLocation": {
+"label": "Открыть папку настроек",
+"tooltip": "Открыть папку, содержащую settings.json",
+"success": "Папка settings.json открыта",
+"failed": "Не удалось открыть папку settings.json"
+},
 "sections": {
 "contentFiltering": "Фильтрация контента",
 "videoSettings": "Настройки видео",
@@ -220,6 +241,10 @@
 }
 },
 "folderSettings": {
+"activeLibrary": "Активная библиотека",
+"activeLibraryHelp": "Переключайтесь между настроенными библиотеками, чтобы обновить папки по умолчанию. Изменение выбора перезагружает страницу.",
+"loadingLibraries": "Загрузка библиотек...",
+"noLibraries": "Библиотеки не настроены",
 "defaultLoraRoot": "Корневая папка LoRA по умолчанию",
 "defaultLoraRootHelp": "Установить корневую папку LoRA по умолчанию для загрузок, импорта и перемещений",
 "defaultCheckpointRoot": "Корневая папка Checkpoint по умолчанию",
@@ -368,6 +393,7 @@
 "viewSelected": "Просмотреть выбранные",
 "addTags": "Добавить теги ко всем",
 "setBaseModel": "Установить базовую модель для всех",
+"setContentRating": "Установить рейтинг контента для всех",
 "copyAll": "Копировать весь синтаксис",
 "refreshAll": "Обновить все метаданные",
 "moveAll": "Переместить все в папку",
@@ -592,6 +618,7 @@
 "contentRating": {
 "title": "Установить рейтинг контента",
 "current": "Текущий",
+"multiple": "Несколько значений",
 "levels": {
 "pg": "PG",
 "pg13": "PG13",
@@ -727,6 +754,7 @@
 "strengthMin": "Мин. сила",
 "strengthMax": "Макс. сила",
 "strength": "Сила",
+"clipStrength": "Сила клипа",
 "clipSkip": "Clip Skip",
 "valuePlaceholder": "Значение",
 "add": "Добавить"
@@ -1069,6 +1097,10 @@
 "bulkBaseModelUpdateSuccess": "Базовая модель успешно обновлена для {count} моделей",
 "bulkBaseModelUpdatePartial": "Обновлено {success} моделей, не удалось обновить {failed} моделей",
 "bulkBaseModelUpdateFailed": "Не удалось обновить базовую модель для выбранных моделей",
+"bulkContentRatingUpdating": "Обновление рейтинга контента для {count} модель(ей)...",
+"bulkContentRatingSet": "Рейтинг контента установлен на {level} для {count} модель(ей)",
+"bulkContentRatingPartial": "Рейтинг контента {level} установлен для {success} модель(ей), {failed} не удалось",
+"bulkContentRatingFailed": "Не удалось обновить рейтинг контента для выбранных моделей",
 "invalidCharactersRemoved": "Недопустимые символы удалены из имени файла",
 "filenameCannotBeEmpty": "Имя файла не может быть пустым",
 "renameFailed": "Не удалось переименовать файл: {message}",
@@ -1103,6 +1135,8 @@
 "compactModeToggled": "Компактный режим {state}",
 "settingSaveFailed": "Не удалось сохранить настройку: {message}",
 "displayDensitySet": "Плотность отображения установлена на {density}",
+"libraryLoadFailed": "Failed to load libraries: {message}",
+"libraryActivateFailed": "Failed to activate library: {message}",
 "languageChangeFailed": "Не удалось изменить язык: {message}",
 "cacheCleared": "Файлы кэша успешно очищены. Кэш будет пересобран при следующем действии.",
 "cacheClearFailed": "Не удалось очистить кэш: {error}",
@@ -1222,6 +1256,12 @@
 "refreshNow": "Обновить сейчас",
 "refreshingIn": "Обновление через",
 "seconds": "секунд"
+},
+"communitySupport": {
+"title": "Keep LoRA Manager Thriving with Your Support ❤️",
+"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
+"supportCta": "Support on Ko-fi",
+"learnMore": "LM Civitai Extension Tutorial"
 }
 }
 }
locales/zh-CN.json
@@ -21,11 +21,18 @@
 "disabled": "已禁用"
 },
 "language": {
-"select": "Language",
-"select_help": "Choose your preferred language for the interface",
+"select": "选择语言",
+"select_help": "选择你喜欢的界面语言",
 "english": "English",
 "chinese_simplified": "中文(简体)",
 "chinese_traditional": "中文(繁体)",
+"russian": "俄语",
+"german": "德语",
+"japanese": "日语",
+"korean": "韩语",
+"french": "法语",
+"spanish": "西班牙语",
+"Hebrew": "עברית",
 "russian": "Русский",
 "german": "Deutsch",
 "japanese": "日本語",
@@ -122,6 +129,20 @@
 "noRemoteImagesAvailable": "此模型在 Civitai 上没有远程示例图片"
 }
 },
+"globalContextMenu": {
+"downloadExampleImages": {
+"label": "下载示例图片",
+"missingPath": "请先设置下载位置后再下载示例图片。",
+"unavailable": "示例图片下载当前不可用。请在页面加载完成后重试。"
+},
+"cleanupExampleImages": {
+"label": "清理示例图片文件夹",
+"success": "已将 {count} 个文件夹移动到已删除文件夹",
+"none": "没有需要清理的示例图片文件夹",
+"partial": "清理完成,有 {failures} 个文件夹跳过",
+"error": "清理示例图片文件夹失败:{message}"
+}
+},
 "header": {
 "appTitle": "LoRA 管理器",
 "navigation": {
@@ -173,6 +194,12 @@
 "civitaiApiKey": "Civitai API 密钥",
 "civitaiApiKeyPlaceholder": "请输入你的 Civitai API 密钥",
 "civitaiApiKeyHelp": "用于从 Civitai 下载模型时的身份验证",
+"openSettingsFileLocation": {
+"label": "打开设置文件夹",
+"tooltip": "打开包含 settings.json 的文件夹",
+"success": "已打开 settings.json 文件夹",
+"failed": "无法打开 settings.json 文件夹"
+},
 "sections": {
 "contentFiltering": "内容过滤",
 "videoSettings": "视频设置",
@@ -220,6 +247,10 @@
 }
 },
 "folderSettings": {
+"activeLibrary": "活动库",
+"activeLibraryHelp": "在已配置的库之间切换以更新默认文件夹。更改选择将重新加载页面。",
+"loadingLibraries": "正在加载库...",
+"noLibraries": "尚未配置库",
 "defaultLoraRoot": "默认 LoRA 根目录",
 "defaultLoraRootHelp": "设置下载、导入和移动时的默认 LoRA 根目录",
 "defaultCheckpointRoot": "默认 Checkpoint 根目录",
@@ -368,6 +399,7 @@
 "viewSelected": "查看已选中",
 "addTags": "为所有添加标签",
 "setBaseModel": "为所有设置基础模型",
+"setContentRating": "为全部设置内容评级",
 "copyAll": "复制全部语法",
 "refreshAll": "刷新全部元数据",
 "moveAll": "全部移动到文件夹",
@@ -592,6 +624,7 @@
 "contentRating": {
 "title": "设置内容评级",
 "current": "当前",
+"multiple": "多个值",
 "levels": {
 "pg": "PG",
 "pg13": "PG13",
@@ -727,6 +760,7 @@
 "strengthMin": "最小强度",
 "strengthMax": "最大强度",
 "strength": "强度",
+"clipStrength": "Clip 强度",
 "clipSkip": "Clip Skip",
 "valuePlaceholder": "数值",
 "add": "添加"
@@ -1069,6 +1103,10 @@
 "bulkBaseModelUpdateSuccess": "成功为 {count} 个模型更新基础模型",
 "bulkBaseModelUpdatePartial": "更新了 {success} 个模型,{failed} 个失败",
 "bulkBaseModelUpdateFailed": "为选中模型更新基础模型失败",
+"bulkContentRatingUpdating": "正在为 {count} 个模型更新内容评级...",
+"bulkContentRatingSet": "已将 {count} 个模型的内容评级设置为 {level}",
+"bulkContentRatingPartial": "已将 {success} 个模型的内容评级设置为 {level},{failed} 个失败",
+"bulkContentRatingFailed": "未能更新所选模型的内容评级",
 "invalidCharactersRemoved": "文件名中的无效字符已移除",
 "filenameCannotBeEmpty": "文件名不能为空",
 "renameFailed": "重命名文件失败:{message}",
@@ -1103,6 +1141,8 @@
 "compactModeToggled": "紧凑模式 {state}",
 "settingSaveFailed": "保存设置失败:{message}",
 "displayDensitySet": "显示密度已设置为 {density}",
+"libraryLoadFailed": "Failed to load libraries: {message}",
+"libraryActivateFailed": "Failed to activate library: {message}",
 "languageChangeFailed": "切换语言失败:{message}",
 "cacheCleared": "缓存文件已成功清除。下次操作将重建缓存。",
 "cacheClearFailed": "清除缓存失败:{error}",
@@ -1222,6 +1262,12 @@
 "refreshNow": "立即刷新",
 "refreshingIn": "将在",
 "seconds": "秒后刷新"
+},
+"communitySupport": {
+"title": "Keep LoRA Manager Thriving with Your Support ❤️",
+"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
+"supportCta": "Support on Ko-fi",
+"learnMore": "LM Civitai Extension Tutorial"
 }
 }
 }
locales/zh-TW.json
@@ -31,7 +31,8 @@
 "japanese": "日本語",
 "korean": "한국어",
 "french": "Français",
-"spanish": "Español"
+"spanish": "Español",
+"Hebrew": "עברית"
 },
 "fileSize": {
 "zero": "0 位元組",
@@ -122,6 +123,20 @@
 "noRemoteImagesAvailable": "此模型在 Civitai 上無遠端範例圖片"
 }
 },
+"globalContextMenu": {
+"downloadExampleImages": {
+"label": "下載範例圖片",
+"missingPath": "請先設定下載位置再下載範例圖片。",
+"unavailable": "範例圖片下載目前尚不可用。請在頁面載入完成後再試一次。"
+},
+"cleanupExampleImages": {
+"label": "清理範例圖片資料夾",
+"success": "已將 {count} 個資料夾移至已刪除資料夾",
+"none": "沒有需要清理的範例圖片資料夾",
+"partial": "清理完成,有 {failures} 個資料夾略過",
+"error": "清理範例圖片資料夾失敗:{message}"
+}
+},
 "header": {
 "appTitle": "LoRA 管理器",
 "navigation": {
@@ -173,6 +188,12 @@
 "civitaiApiKey": "Civitai API 金鑰",
 "civitaiApiKeyPlaceholder": "請輸入您的 Civitai API 金鑰",
 "civitaiApiKeyHelp": "用於從 Civitai 下載模型時的身份驗證",
+"openSettingsFileLocation": {
+"label": "開啟設定資料夾",
+"tooltip": "開啟包含 settings.json 的資料夾",
+"success": "已開啟 settings.json 資料夾",
+"failed": "無法開啟 settings.json 資料夾"
+},
 "sections": {
 "contentFiltering": "內容過濾",
 "videoSettings": "影片設定",
@@ -220,6 +241,10 @@
 }
 },
 "folderSettings": {
+"activeLibrary": "使用中的資料庫",
+"activeLibraryHelp": "在已設定的資料庫之間切換以更新預設資料夾。變更選項會重新載入頁面。",
+"loadingLibraries": "正在載入資料庫...",
+"noLibraries": "尚未設定任何資料庫",
 "defaultLoraRoot": "預設 LoRA 根目錄",
 "defaultLoraRootHelp": "設定下載、匯入和移動時的預設 LoRA 根目錄",
 "defaultCheckpointRoot": "預設 Checkpoint 根目錄",
@@ -368,6 +393,7 @@
 "viewSelected": "檢視已選取",
 "addTags": "新增標籤到全部",
 "setBaseModel": "設定全部基礎模型",
+"setContentRating": "為全部設定內容分級",
 "copyAll": "複製全部語法",
 "refreshAll": "刷新全部 metadata",
 "moveAll": "全部移動到資料夾",
@@ -592,6 +618,7 @@
 "contentRating": {
 "title": "設定內容分級",
 "current": "目前",
+"multiple": "多個值",
 "levels": {
 "pg": "PG",
 "pg13": "PG13",
@@ -727,6 +754,7 @@
 "strengthMin": "最小強度",
 "strengthMax": "最大強度",
 "strength": "強度",
+"clipStrength": "Clip 強度",
 "clipSkip": "Clip Skip",
 "valuePlaceholder": "數值",
 "add": "新增"
@@ -1069,6 +1097,10 @@
 "bulkBaseModelUpdateSuccess": "已成功為 {count} 個模型更新基礎模型",
 "bulkBaseModelUpdatePartial": "已更新 {success} 個模型,{failed} 個模型失敗",
 "bulkBaseModelUpdateFailed": "更新所選模型的基礎模型失敗",
+"bulkContentRatingUpdating": "正在為 {count} 個模型更新內容分級...",
+"bulkContentRatingSet": "已將 {count} 個模型的內容分級設定為 {level}",
+"bulkContentRatingPartial": "已將 {success} 個模型的內容分級設定為 {level},{failed} 個失敗",
+"bulkContentRatingFailed": "無法更新所選模型的內容分級",
 "invalidCharactersRemoved": "已移除檔名中的無效字元",
 "filenameCannotBeEmpty": "檔案名稱不可為空",
 "renameFailed": "重新命名檔案失敗:{message}",
@@ -1103,6 +1135,8 @@
 "compactModeToggled": "緊湊模式已{state}",
 "settingSaveFailed": "儲存設定失敗:{message}",
 "displayDensitySet": "顯示密度已設為 {density}",
+"libraryLoadFailed": "Failed to load libraries: {message}",
+"libraryActivateFailed": "Failed to activate library: {message}",
 "languageChangeFailed": "切換語言失敗:{message}",
 "cacheCleared": "快取檔案已成功清除。快取將於下次操作時重建。",
 "cacheClearFailed": "清除快取失敗:{error}",
@@ -1222,6 +1256,12 @@
 "refreshNow": "立即重新整理",
 "refreshingIn": "將於",
 "seconds": "秒後重新整理"
+},
+"communitySupport": {
+"title": "Keep LoRA Manager Thriving with Your Support ❤️",
+"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
+"supportCta": "Support on Ko-fi",
+"learnMore": "LM Civitai Extension Tutorial"
 }
 }
 }
package-lock.json (generated, new file, 2572 lines)
File diff suppressed because it is too large
package.json (new file, 15 lines)
@@ -0,0 +1,15 @@
+{
+  "name": "comfyui-lora-manager-frontend",
+  "version": "0.1.0",
+  "private": true,
+  "type": "module",
+  "scripts": {
+    "test": "vitest run",
+    "test:watch": "vitest",
+    "test:coverage": "node scripts/run_frontend_coverage.js"
+  },
+  "devDependencies": {
+    "jsdom": "^24.0.0",
+    "vitest": "^1.6.0"
+  }
+}
py/__init__.py (new file, 12 lines)
@@ -0,0 +1,12 @@
+"""Project namespace package."""
+
+# pytest's internal compatibility layer still imports ``py.path.local`` from the
+# historical ``py`` dependency. Because this project reuses the ``py`` package
+# name, we expose a minimal shim so ``py.path.local`` resolves to ``pathlib.Path``
+# during test runs without pulling in the external dependency.
+from pathlib import Path
+from types import SimpleNamespace
+
+path = SimpleNamespace(local=Path)
+
+__all__ = ["path"]
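A quick interactive check of what this shim provides (a hypothetical session; it assumes the repository root is on sys.path so that this package imports under the name py):

    import py  # resolves to this project's package, not the external ``py`` dist

    p = py.path.local("/tmp/example")  # ``local`` is simply ``pathlib.Path``
    print(type(p))                     # e.g. <class 'pathlib.PosixPath'> on POSIX
    print(p / "settings.json")        # ordinary pathlib semantics apply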
py/config.py (437 lines)
@@ -1,17 +1,50 @@
 import os
 import platform
+from pathlib import Path
 import folder_paths # type: ignore
-from typing import List
+from typing import Dict, Iterable, List, Mapping, Set
 import logging
-import sys
 import json
 import urllib.parse
 
-# Check if running in standalone mode
-standalone_mode = 'nodes' not in sys.modules
+from .utils.settings_paths import ensure_settings_file
+
+# Use an environment variable to control standalone mode
+standalone_mode = os.environ.get("LORA_MANAGER_STANDALONE", "0") == "1" or os.environ.get("HF_HUB_DISABLE_TELEMETRY", "0") == "0"
 
 logger = logging.getLogger(__name__)
+
+
+def _normalize_folder_paths_for_comparison(
+    folder_paths: Mapping[str, Iterable[str]]
+) -> Dict[str, Set[str]]:
+    """Normalize folder paths for comparison across libraries."""
+
+    normalized: Dict[str, Set[str]] = {}
+    for key, values in folder_paths.items():
+        if isinstance(values, str):
+            candidate_values: Iterable[str] = [values]
+        else:
+            try:
+                candidate_values = iter(values)
+            except TypeError:
+                continue
+
+        normalized_values: Set[str] = set()
+        for value in candidate_values:
+            if not isinstance(value, str):
+                continue
+            stripped = value.strip()
+            if not stripped:
+                continue
+            normalized_values.add(os.path.normcase(os.path.normpath(stripped)))
+
+        if normalized_values:
+            normalized[key] = normalized_values
+
+    return normalized
+
+
 class Config:
     """Global configuration for LoRA Manager"""
 
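To make the comparison semantics concrete, here is a small illustrative call (the input values are invented for the example, and os.path.normcase only lower-cases on Windows, so the exact output is platform dependent):

    # Illustrative only: shows how stray whitespace, trailing separators,
    # bare strings and non-string entries are handled before two libraries
    # are compared. Assumes the function is in scope, e.g. imported from
    # py.config.
    paths = {
        "loras": ["C:/Models/Loras/", "  c:\\models\\loras  "],  # collapse to one entry
        "checkpoints": "D:/Checkpoints",  # a bare string is wrapped as one value
        "unet": [None, ""],               # non-strings and blanks are dropped
    }
    print(_normalize_folder_paths_for_comparison(paths))
    # On Windows: {'loras': {'c:\\models\\loras'}, 'checkpoints': {'d:\\checkpoints'}}
    # The 'unet' key disappears because no usable value survives normalization.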
@@ -20,9 +53,9 @@ class Config:
         self.static_path = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'static')
         self.i18n_path = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'locales')
         # Path mapping dictionary, target to link mapping
-        self._path_mappings = {}
-        # Static route mapping dictionary, target to route mapping
-        self._route_mappings = {}
+        self._path_mappings: Dict[str, str] = {}
+        # Normalized preview root directories used to validate preview access
+        self._preview_root_paths: Set[Path] = set()
         self.loras_roots = self._init_lora_paths()
         self.checkpoints_roots = None
         self.unet_roots = None
@@ -31,45 +64,73 @@ class Config:
|
|||||||
self.embeddings_roots = self._init_embedding_paths()
|
self.embeddings_roots = self._init_embedding_paths()
|
||||||
# Scan symbolic links during initialization
|
# Scan symbolic links during initialization
|
||||||
self._scan_symbolic_links()
|
self._scan_symbolic_links()
|
||||||
|
self._rebuild_preview_roots()
|
||||||
|
|
||||||
if not standalone_mode:
|
if not standalone_mode:
|
||||||
# Save the paths to settings.json when running in ComfyUI mode
|
# Save the paths to settings.json when running in ComfyUI mode
|
||||||
self.save_folder_paths_to_settings()
|
self.save_folder_paths_to_settings()
|
||||||
|
|
||||||
def save_folder_paths_to_settings(self):
|
def save_folder_paths_to_settings(self):
|
||||||
"""Save folder paths to settings.json for standalone mode to use later"""
|
"""Persist ComfyUI-derived folder paths to the multi-library settings."""
|
||||||
try:
|
try:
|
||||||
# Check if we're running in ComfyUI mode (not standalone)
|
ensure_settings_file(logger)
|
||||||
# Load existing settings
|
from .services.settings_manager import settings as settings_service
|
||||||
settings_path = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'settings.json')
|
|
||||||
settings = {}
|
|
||||||
if os.path.exists(settings_path):
|
|
||||||
with open(settings_path, 'r', encoding='utf-8') as f:
|
|
||||||
settings = json.load(f)
|
|
||||||
|
|
||||||
# Update settings with paths
|
|
||||||
settings['folder_paths'] = {
|
|
||||||
'loras': self.loras_roots,
-                'checkpoints': self.checkpoints_roots,
-                'unet': self.unet_roots,
-                'embeddings': self.embeddings_roots,
-            }
-
-            # Add default roots if there's only one item and key doesn't exist
-            if len(self.loras_roots) == 1 and "default_lora_root" not in settings:
-                settings["default_lora_root"] = self.loras_roots[0]
-
-            if self.checkpoints_roots and len(self.checkpoints_roots) == 1 and "default_checkpoint_root" not in settings:
-                settings["default_checkpoint_root"] = self.checkpoints_roots[0]
-
-            if self.embeddings_roots and len(self.embeddings_roots) == 1 and "default_embedding_root" not in settings:
-                settings["default_embedding_root"] = self.embeddings_roots[0]
-
-            # Save settings
-            with open(settings_path, 'w', encoding='utf-8') as f:
-                json.dump(settings, f, indent=2)
-
-            logger.info("Saved folder paths to settings.json")
+            libraries = settings_service.get_libraries()
+            comfy_library = libraries.get("comfyui", {})
+            default_library = libraries.get("default", {})
+
+            target_folder_paths = {
+                'loras': list(self.loras_roots),
+                'checkpoints': list(self.checkpoints_roots or []),
+                'unet': list(self.unet_roots or []),
+                'embeddings': list(self.embeddings_roots or []),
+            }
+
+            normalized_target_paths = _normalize_folder_paths_for_comparison(target_folder_paths)
+
+            if (not comfy_library and default_library and normalized_target_paths and
+                    _normalize_folder_paths_for_comparison(default_library.get("folder_paths", {})) ==
+                    normalized_target_paths):
+                try:
+                    settings_service.rename_library("default", "comfyui")
+                    logger.info("Renamed legacy 'default' library to 'comfyui'")
+                    libraries = settings_service.get_libraries()
+                    comfy_library = libraries.get("comfyui", {})
+                except Exception as rename_error:
+                    logger.debug(
+                        "Failed to rename legacy 'default' library: %s", rename_error
+                    )
+
+            default_lora_root = comfy_library.get("default_lora_root", "")
+            if not default_lora_root and len(self.loras_roots) == 1:
+                default_lora_root = self.loras_roots[0]
+
+            default_checkpoint_root = comfy_library.get("default_checkpoint_root", "")
+            if (not default_checkpoint_root and self.checkpoints_roots and
+                    len(self.checkpoints_roots) == 1):
+                default_checkpoint_root = self.checkpoints_roots[0]
+
+            default_embedding_root = comfy_library.get("default_embedding_root", "")
+            if (not default_embedding_root and self.embeddings_roots and
+                    len(self.embeddings_roots) == 1):
+                default_embedding_root = self.embeddings_roots[0]
+
+            metadata = dict(comfy_library.get("metadata", {}))
+            metadata.setdefault("display_name", "ComfyUI")
+            metadata["source"] = "comfyui"
+
+            settings_service.upsert_library(
+                "comfyui",
+                folder_paths=target_folder_paths,
+                default_lora_root=default_lora_root,
+                default_checkpoint_root=default_checkpoint_root,
+                default_embedding_root=default_embedding_root,
+                metadata=metadata,
+                activate=True,
+            )
+
+            logger.info("Updated 'comfyui' library with current folder paths")
         except Exception as e:
             logger.warning(f"Failed to save folder paths: {e}")

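The hunk above migrates flat `settings.json` keys into a named library registry and only renames the legacy "default" library when its folder paths match the current ComfyUI ones. The real `_normalize_folder_paths_for_comparison` is defined elsewhere in the file and is not shown in this diff, so the sketch below is an assumption about its behavior (order-, separator-, and case-insensitive comparison of the `{model_type: [paths]}` mapping):

```python
# Hypothetical sketch of the normalization this hunk relies on; the actual
# helper in the repo may differ in detail.
import os

def _normalize_folder_paths_for_comparison(folder_paths):
    """Normalize a {model_type: [paths]} mapping for equality checks."""
    normalized = {}
    for key, paths in (folder_paths or {}).items():
        cleaned = sorted(
            os.path.normpath(p).replace(os.sep, '/').lower()
            for p in (paths or [])
            if isinstance(p, str)
        )
        if cleaned:  # empty lists drop out, so they never block a match
            normalized[key] = tuple(cleaned)
    return normalized

# A legacy "default" library whose paths differ only in case or a trailing
# slash still compares equal and can safely be renamed to "comfyui".
legacy = {'loras': ['/data/models/Loras/'], 'checkpoints': []}
current = {'loras': ['/data/models/loras'], 'checkpoints': []}
assert _normalize_folder_paths_for_comparison(legacy) == \
       _normalize_folder_paths_for_comparison(current)
```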
@@ -126,12 +187,65 @@ class Config:
         # Keep the original mapping: target path -> link path
         self._path_mappings[normalized_target] = normalized_link
         logger.info(f"Added path mapping: {normalized_target} -> {normalized_link}")
+        self._preview_root_paths.update(self._expand_preview_root(normalized_target))
+        self._preview_root_paths.update(self._expand_preview_root(normalized_link))

-    def add_route_mapping(self, path: str, route: str):
-        """Add a static route mapping"""
-        normalized_path = os.path.normpath(path).replace(os.sep, '/')
-        self._route_mappings[normalized_path] = route
-        # logger.info(f"Added route mapping: {normalized_path} -> {route}")
+    def _expand_preview_root(self, path: str) -> Set[Path]:
+        """Return normalized ``Path`` objects representing a preview root."""
+        roots: Set[Path] = set()
+        if not path:
+            return roots
+
+        try:
+            raw_path = Path(path).expanduser()
+        except Exception:
+            return roots
+
+        if raw_path.is_absolute():
+            roots.add(raw_path)
+
+        try:
+            resolved = raw_path.resolve(strict=False)
+        except RuntimeError:
+            resolved = raw_path.absolute()
+        roots.add(resolved)
+
+        try:
+            real_path = raw_path.resolve()
+        except (FileNotFoundError, RuntimeError):
+            real_path = resolved
+        roots.add(real_path)
+
+        normalized: Set[Path] = set()
+        for candidate in roots:
+            if candidate.is_absolute():
+                normalized.add(candidate)
+            else:
+                try:
+                    normalized.add(candidate.resolve(strict=False))
+                except RuntimeError:
+                    normalized.add(candidate.absolute())
+
+        return normalized
+
+    def _rebuild_preview_roots(self) -> None:
+        """Recompute the cache of directories permitted for previews."""
+
+        preview_roots: Set[Path] = set()
+
+        for root in self.loras_roots or []:
+            preview_roots.update(self._expand_preview_root(root))
+        for root in self.base_models_roots or []:
+            preview_roots.update(self._expand_preview_root(root))
+        for root in self.embeddings_roots or []:
+            preview_roots.update(self._expand_preview_root(root))
+
+        for target, link in self._path_mappings.items():
+            preview_roots.update(self._expand_preview_root(target))
+            preview_roots.update(self._expand_preview_root(link))
+
+        self._preview_root_paths = {path for path in preview_roots if path.is_absolute()}

     def map_path_to_link(self, path: str) -> str:
         """Map a target path back to its symbolic link path"""
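`_expand_preview_root` deliberately collects both the literal spelling of a root and its resolved target, so that a preview file addressed through either form passes the later containment check. A small illustration (paths here are hypothetical; on a real system `/data/loras-link` would be a symlink to `/mnt/storage/loras`):

```python
from pathlib import Path

raw = Path('/data/loras-link')
roots = {raw}                         # literal, absolute form
roots.add(raw.resolve(strict=False))  # resolved form, e.g. /mnt/storage/loras

# is_preview_path_allowed() (added later in this diff) accepts a candidate
# that is relative to any collected root, so both of these would pass:
#   /data/loras-link/model/preview.png    (matches the literal root)
#   /mnt/storage/loras/model/preview.png  (matches the resolved root)
print(sorted(str(r) for r in roots))
```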
@@ -155,31 +269,93 @@ class Config:
             return mapped_path
         return link_path

+    def _dedupe_existing_paths(self, raw_paths: Iterable[str]) -> Dict[str, str]:
+        dedup: Dict[str, str] = {}
+        for path in raw_paths:
+            if not isinstance(path, str):
+                continue
+            if not os.path.exists(path):
+                continue
+            real_path = os.path.normpath(os.path.realpath(path)).replace(os.sep, '/')
+            normalized = os.path.normpath(path).replace(os.sep, '/')
+            if real_path not in dedup:
+                dedup[real_path] = normalized
+        return dedup
+
+    def _prepare_lora_paths(self, raw_paths: Iterable[str]) -> List[str]:
+        path_map = self._dedupe_existing_paths(raw_paths)
+        unique_paths = sorted(path_map.values(), key=lambda p: p.lower())
+
+        for original_path in unique_paths:
+            real_path = os.path.normpath(os.path.realpath(original_path)).replace(os.sep, '/')
+            if real_path != original_path:
+                self.add_path_mapping(original_path, real_path)
+
+        return unique_paths
+
+    def _prepare_checkpoint_paths(
+        self, checkpoint_paths: Iterable[str], unet_paths: Iterable[str]
+    ) -> List[str]:
+        checkpoint_map = self._dedupe_existing_paths(checkpoint_paths)
+        unet_map = self._dedupe_existing_paths(unet_paths)
+
+        merged_map: Dict[str, str] = {}
+        for real_path, original in {**checkpoint_map, **unet_map}.items():
+            if real_path not in merged_map:
+                merged_map[real_path] = original
+
+        unique_paths = sorted(merged_map.values(), key=lambda p: p.lower())
+
+        checkpoint_values = set(checkpoint_map.values())
+        unet_values = set(unet_map.values())
+        self.checkpoints_roots = [p for p in unique_paths if p in checkpoint_values]
+        self.unet_roots = [p for p in unique_paths if p in unet_values]
+
+        for original_path in unique_paths:
+            real_path = os.path.normpath(os.path.realpath(original_path)).replace(os.sep, '/')
+            if real_path != original_path:
+                self.add_path_mapping(original_path, real_path)
+
+        return unique_paths
+
+    def _prepare_embedding_paths(self, raw_paths: Iterable[str]) -> List[str]:
+        path_map = self._dedupe_existing_paths(raw_paths)
+        unique_paths = sorted(path_map.values(), key=lambda p: p.lower())
+
+        for original_path in unique_paths:
+            real_path = os.path.normpath(os.path.realpath(original_path)).replace(os.sep, '/')
+            if real_path != original_path:
+                self.add_path_mapping(original_path, real_path)
+
+        return unique_paths
+
+    def _apply_library_paths(self, folder_paths: Mapping[str, Iterable[str]]) -> None:
+        self._path_mappings.clear()
+        self._preview_root_paths = set()
+
+        lora_paths = folder_paths.get('loras', []) or []
+        checkpoint_paths = folder_paths.get('checkpoints', []) or []
+        unet_paths = folder_paths.get('unet', []) or []
+        embedding_paths = folder_paths.get('embeddings', []) or []
+
+        self.loras_roots = self._prepare_lora_paths(lora_paths)
+        self.base_models_roots = self._prepare_checkpoint_paths(checkpoint_paths, unet_paths)
+        self.embeddings_roots = self._prepare_embedding_paths(embedding_paths)
+
+        self._scan_symbolic_links()
+        self._rebuild_preview_roots()
+
     def _init_lora_paths(self) -> List[str]:
         """Initialize and validate LoRA paths from ComfyUI settings"""
         try:
             raw_paths = folder_paths.get_folder_paths("loras")
-            # Normalize and resolve symlinks, store mapping from resolved -> original
-            path_map = {}
-            for path in raw_paths:
-                if os.path.exists(path):
-                    real_path = os.path.normpath(os.path.realpath(path)).replace(os.sep, '/')
-                    path_map[real_path] = path_map.get(real_path, path.replace(os.sep, "/"))  # preserve first seen
-
-            # Now sort and use only the deduplicated real paths
-            unique_paths = sorted(path_map.values(), key=lambda p: p.lower())
+            unique_paths = self._prepare_lora_paths(raw_paths)
             logger.info("Found LoRA roots:" + ("\n - " + "\n - ".join(unique_paths) if unique_paths else "[]"))

             if not unique_paths:
                 logger.warning("No valid loras folders found in ComfyUI configuration")
                 return []

-            for original_path in unique_paths:
-                real_path = os.path.normpath(os.path.realpath(original_path)).replace(os.sep, '/')
-                if real_path != original_path:
-                    self.add_path_mapping(original_path, real_path)
-
             return unique_paths
         except Exception as e:
             logger.warning(f"Error initializing LoRA paths: {e}")
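The `_prepare_*` helpers all funnel through `_dedupe_existing_paths`, whose rule is: two configured entries that resolve to the same real directory collapse into one, keeping the spelling that was listed first. A standalone sketch of that rule (directory names are illustrative):

```python
import os
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    real_dir = os.path.join(tmp, 'loras')
    os.makedirs(real_dir)
    alias = os.path.join(tmp, 'loras-alias')
    try:
        os.symlink(real_dir, alias)
    except OSError:
        alias = real_dir  # symlinks unavailable (e.g. Windows without privileges)

    dedup = {}
    for path in [alias, real_dir]:          # alias is listed first
        if not os.path.exists(path):
            continue
        real = os.path.normpath(os.path.realpath(path)).replace(os.sep, '/')
        dedup.setdefault(real, os.path.normpath(path).replace(os.sep, '/'))

    # Both entries resolve to the same real directory, so only the
    # first-seen spelling survives.
    assert list(dedup.values()) == [alias.replace(os.sep, '/')]
```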
@@ -188,52 +364,17 @@ class Config:
     def _init_checkpoint_paths(self) -> List[str]:
         """Initialize and validate checkpoint paths from ComfyUI settings"""
         try:
-            # Get checkpoint paths from folder_paths
             raw_checkpoint_paths = folder_paths.get_folder_paths("checkpoints")
             raw_unet_paths = folder_paths.get_folder_paths("unet")
-            # Normalize and resolve symlinks for checkpoints, store mapping from resolved -> original
-            checkpoint_map = {}
-            for path in raw_checkpoint_paths:
-                if os.path.exists(path):
-                    real_path = os.path.normpath(os.path.realpath(path)).replace(os.sep, '/')
-                    checkpoint_map[real_path] = checkpoint_map.get(real_path, path.replace(os.sep, "/"))  # preserve first seen
-
-            # Normalize and resolve symlinks for unet, store mapping from resolved -> original
-            unet_map = {}
-            for path in raw_unet_paths:
-                if os.path.exists(path):
-                    real_path = os.path.normpath(os.path.realpath(path)).replace(os.sep, '/')
-                    unet_map[real_path] = unet_map.get(real_path, path.replace(os.sep, "/"))  # preserve first seen
-
-            # Merge both maps and deduplicate by real path
-            merged_map = {}
-            for real_path, orig_path in {**checkpoint_map, **unet_map}.items():
-                if real_path not in merged_map:
-                    merged_map[real_path] = orig_path
-
-            # Now sort and use only the deduplicated real paths
-            unique_paths = sorted(merged_map.values(), key=lambda p: p.lower())
-
-            # Split back into checkpoints and unet roots for class properties
-            self.checkpoints_roots = [p for p in unique_paths if p in checkpoint_map.values()]
-            self.unet_roots = [p for p in unique_paths if p in unet_map.values()]
-
-            all_paths = unique_paths
-
-            logger.info("Found checkpoint roots:" + ("\n - " + "\n - ".join(all_paths) if all_paths else "[]"))
-
-            if not all_paths:
+            unique_paths = self._prepare_checkpoint_paths(raw_checkpoint_paths, raw_unet_paths)
+            logger.info("Found checkpoint roots:" + ("\n - " + "\n - ".join(unique_paths) if unique_paths else "[]"))
+
+            if not unique_paths:
                 logger.warning("No valid checkpoint folders found in ComfyUI configuration")
                 return []

-            # Initialize path mappings
-            for original_path in all_paths:
-                real_path = os.path.normpath(os.path.realpath(original_path)).replace(os.sep, '/')
-                if real_path != original_path:
-                    self.add_path_mapping(original_path, real_path)
-
-            return all_paths
+            return unique_paths
         except Exception as e:
             logger.warning(f"Error initializing checkpoint paths: {e}")
             return []
@@ -242,27 +383,13 @@ class Config:
         """Initialize and validate embedding paths from ComfyUI settings"""
         try:
             raw_paths = folder_paths.get_folder_paths("embeddings")
-            # Normalize and resolve symlinks, store mapping from resolved -> original
-            path_map = {}
-            for path in raw_paths:
-                if os.path.exists(path):
-                    real_path = os.path.normpath(os.path.realpath(path)).replace(os.sep, '/')
-                    path_map[real_path] = path_map.get(real_path, path.replace(os.sep, "/"))  # preserve first seen
-
-            # Now sort and use only the deduplicated real paths
-            unique_paths = sorted(path_map.values(), key=lambda p: p.lower())
+            unique_paths = self._prepare_embedding_paths(raw_paths)
             logger.info("Found embedding roots:" + ("\n - " + "\n - ".join(unique_paths) if unique_paths else "[]"))

             if not unique_paths:
                 logger.warning("No valid embeddings folders found in ComfyUI configuration")
                 return []

-            for original_path in unique_paths:
-                real_path = os.path.normpath(os.path.realpath(original_path)).replace(os.sep, '/')
-                if real_path != original_path:
-                    self.add_path_mapping(original_path, real_path)
-
             return unique_paths
         except Exception as e:
             logger.warning(f"Error initializing embedding paths: {e}")
@@ -271,25 +398,61 @@ class Config:
     def get_preview_static_url(self, preview_path: str) -> str:
         if not preview_path:
             return ""

-        real_path = os.path.realpath(preview_path).replace(os.sep, '/')
-
-        # Find longest matching path (most specific match)
-        best_match = ""
-        best_route = ""
-        for path, route in self._route_mappings.items():
-            if real_path.startswith(path) and len(path) > len(best_match):
-                best_match = path
-                best_route = route
-
-        if best_match:
-            relative_path = os.path.relpath(real_path, best_match).replace(os.sep, '/')
-            safe_parts = [urllib.parse.quote(part) for part in relative_path.split('/')]
-            safe_path = '/'.join(safe_parts)
-            return f'{best_route}/{safe_path}'
-
-        return ""
+        normalized = os.path.normpath(preview_path).replace(os.sep, '/')
+        encoded_path = urllib.parse.quote(normalized, safe='')
+        return f'/api/lm/previews?path={encoded_path}'
+
+    def is_preview_path_allowed(self, preview_path: str) -> bool:
+        """Return ``True`` if ``preview_path`` is within an allowed directory."""
+        if not preview_path:
+            return False
+
+        try:
+            candidate = Path(preview_path).expanduser().resolve(strict=False)
+        except Exception:
+            return False
+
+        for root in self._preview_root_paths:
+            try:
+                candidate.relative_to(root)
+                return True
+            except ValueError:
+                continue
+
+        return False
+
+    def apply_library_settings(self, library_config: Mapping[str, object]) -> None:
+        """Update runtime paths to match the provided library configuration."""
+        folder_paths = library_config.get('folder_paths') if isinstance(library_config, Mapping) else {}
+        if not isinstance(folder_paths, Mapping):
+            folder_paths = {}
+
+        self._apply_library_paths(folder_paths)
+
+        logger.info(
+            "Applied library settings with %d lora roots, %d checkpoint roots, and %d embedding roots",
+            len(self.loras_roots or []),
+            len(self.base_models_roots or []),
+            len(self.embeddings_roots or []),
+        )
+
+    def get_library_registry_snapshot(self) -> Dict[str, object]:
+        """Return the current library registry and active library name."""
+
+        try:
+            from .services.settings_manager import settings as settings_service
+
+            libraries = settings_service.get_libraries()
+            active_library = settings_service.get_active_library_name()
+            return {
+                "active_library": active_library,
+                "libraries": libraries,
+            }
+        except Exception as exc:  # pragma: no cover - defensive logging
+            logger.debug("Failed to collect library registry snapshot: %s", exc)
+            return {"active_library": "", "libraries": {}}

 # Global config instance
 config = Config()
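The hunk above replaces per-root static routes with a single query-parameter endpoint. What the new URL scheme produces for a given preview file (the path below is illustrative):

```python
import os
import urllib.parse

preview_path = '/data/loras/character/portrait.preview.png'
normalized = os.path.normpath(preview_path).replace(os.sep, '/')
encoded = urllib.parse.quote(normalized, safe='')  # encode '/' as %2F too
url = f'/api/lm/previews?path={encoded}'
print(url)
# /api/lm/previews?path=%2Fdata%2Floras%2Fcharacter%2Fportrait.preview.png

# The serving endpoint (added separately in PreviewRoutes, not shown in this
# diff) is expected to decode the parameter and reject anything for which
# config.is_preview_path_allowed() returns False, so requests cannot escape
# the collected preview roots via path traversal.
```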
@@ -2,7 +2,6 @@ import asyncio
 import sys
 import os
 import logging
-from pathlib import Path
 from server import PromptServer  # type: ignore

 from .config import config
@@ -11,11 +10,13 @@ from .routes.recipe_routes import RecipeRoutes
 from .routes.stats_routes import StatsRoutes
 from .routes.update_routes import UpdateRoutes
 from .routes.misc_routes import MiscRoutes
+from .routes.preview_routes import PreviewRoutes
 from .routes.example_images_routes import ExampleImagesRoutes
 from .services.service_registry import ServiceRegistry
 from .services.settings_manager import settings
 from .utils.example_images_migration import ExampleImagesMigration
 from .services.websocket_manager import ws_manager
+from .services.example_images_cleanup_service import ExampleImagesCleanupService

 logger = logging.getLogger(__name__)

@@ -49,102 +50,12 @@ class LoraManager:
         asyncio_logger = logging.getLogger("asyncio")
         asyncio_logger.addFilter(ConnectionResetFilter())

-        added_targets = set()  # Track already added target paths
-
         # Add static route for example images if the path exists in settings
         example_images_path = settings.get('example_images_path')
         logger.info(f"Example images path: {example_images_path}")
         if example_images_path and os.path.exists(example_images_path):
             app.router.add_static('/example_images_static', example_images_path)
             logger.info(f"Added static route for example images: /example_images_static -> {example_images_path}")

-        # Add static routes for each lora root
-        for idx, root in enumerate(config.loras_roots, start=1):
-            preview_path = f'/loras_static/root{idx}/preview'
-
-            real_root = root
-            if root in config._path_mappings.values():
-                for target, link in config._path_mappings.items():
-                    if link == root:
-                        real_root = target
-                        break
-            # Add static route for original path
-            app.router.add_static(preview_path, real_root)
-            logger.info(f"Added static route {preview_path} -> {real_root}")
-
-            # Record route mapping
-            config.add_route_mapping(real_root, preview_path)
-            added_targets.add(real_root)
-
-        # Add static routes for each checkpoint root
-        for idx, root in enumerate(config.base_models_roots, start=1):
-            preview_path = f'/checkpoints_static/root{idx}/preview'
-
-            real_root = root
-            if root in config._path_mappings.values():
-                for target, link in config._path_mappings.items():
-                    if link == root:
-                        real_root = target
-                        break
-            # Add static route for original path
-            app.router.add_static(preview_path, real_root)
-            logger.info(f"Added static route {preview_path} -> {real_root}")
-
-            # Record route mapping
-            config.add_route_mapping(real_root, preview_path)
-            added_targets.add(real_root)
-
-        # Add static routes for each embedding root
-        for idx, root in enumerate(config.embeddings_roots, start=1):
-            preview_path = f'/embeddings_static/root{idx}/preview'
-
-            real_root = root
-            if root in config._path_mappings.values():
-                for target, link in config._path_mappings.items():
-                    if link == root:
-                        real_root = target
-                        break
-            # Add static route for original path
-            app.router.add_static(preview_path, real_root)
-            logger.info(f"Added static route {preview_path} -> {real_root}")
-
-            # Record route mapping
-            config.add_route_mapping(real_root, preview_path)
-            added_targets.add(real_root)
-
-        # Add static routes for symlink target paths
-        link_idx = {
-            'lora': 1,
-            'checkpoint': 1,
-            'embedding': 1
-        }
-
-        for target_path, link_path in config._path_mappings.items():
-            if target_path not in added_targets:
-                # Determine if this is a checkpoint, lora, or embedding link based on path
-                is_checkpoint = any(cp_root in link_path for cp_root in config.base_models_roots)
-                is_checkpoint = is_checkpoint or any(cp_root in target_path for cp_root in config.base_models_roots)
-                is_embedding = any(emb_root in link_path for emb_root in config.embeddings_roots)
-                is_embedding = is_embedding or any(emb_root in target_path for emb_root in config.embeddings_roots)
-
-                if is_checkpoint:
-                    route_path = f'/checkpoints_static/link_{link_idx["checkpoint"]}/preview'
-                    link_idx["checkpoint"] += 1
-                elif is_embedding:
-                    route_path = f'/embeddings_static/link_{link_idx["embedding"]}/preview'
-                    link_idx["embedding"] += 1
-                else:
-                    route_path = f'/loras_static/link_{link_idx["lora"]}/preview'
-                    link_idx["lora"] += 1
-
-                try:
-                    app.router.add_static(route_path, Path(target_path).resolve(strict=False))
-                    logger.info(f"Added static route for link target {route_path} -> {target_path}")
-                    config.add_route_mapping(target_path, route_path)
-                    added_targets.add(target_path)
-                except Exception as e:
-                    logger.warning(f"Failed to add static route on initialization for {target_path}: {e}")
-                    continue
-
         # Add static route for locales JSON files
         if os.path.exists(config.i18n_path):
@@ -166,7 +77,8 @@ class LoraManager:
         RecipeRoutes.setup_routes(app)
         UpdateRoutes.setup_routes(app)
         MiscRoutes.setup_routes(app)
-        ExampleImagesRoutes.setup_routes(app)
+        ExampleImagesRoutes.setup_routes(app, ws_manager=ws_manager)
+        PreviewRoutes.setup_routes(app)

         # Setup WebSocket routes that are shared across all model types
         app.router.add_get('/ws/fetch-progress', ws_manager.handle_connection)
@@ -240,7 +152,6 @@ class LoraManager:
         # Run post-initialization tasks
         post_tasks = [
             asyncio.create_task(cls._cleanup_backup_files(), name='cleanup_bak_files'),
-            asyncio.create_task(cls._cleanup_example_images_folders(), name='cleanup_example_images'),
             # Add more post-initialization tasks here as needed
             # asyncio.create_task(cls._another_post_task(), name='another_task'),
         ]
@@ -352,117 +263,37 @@ class LoraManager:

     @classmethod
     async def _cleanup_example_images_folders(cls):
-        """Clean up invalid or empty folders in example images directory"""
+        """Invoke the example images cleanup service for manual execution."""
         try:
-            example_images_path = settings.get('example_images_path')
-            if not example_images_path or not os.path.exists(example_images_path):
-                logger.debug("Example images path not configured or doesn't exist, skipping cleanup")
-                return
-
-            logger.debug(f"Starting cleanup of example images folders in: {example_images_path}")
-
-            # Get all scanner instances to check hash validity
-            lora_scanner = await ServiceRegistry.get_lora_scanner()
-            checkpoint_scanner = await ServiceRegistry.get_checkpoint_scanner()
-            embedding_scanner = await ServiceRegistry.get_embedding_scanner()
-
-            total_folders_checked = 0
-            empty_folders_removed = 0
-            invalid_hash_folders_removed = 0
-
-            # Scan the example images directory
-            try:
-                with os.scandir(example_images_path) as it:
-                    for entry in it:
-                        if not entry.is_dir(follow_symlinks=False):
-                            continue
-
-                        folder_name = entry.name
-                        folder_path = entry.path
-                        total_folders_checked += 1
-
-                        try:
-                            # Check if folder is empty
-                            is_empty = cls._is_folder_empty(folder_path)
-                            if is_empty:
-                                logger.debug(f"Removing empty example images folder: {folder_name}")
-                                await cls._remove_folder_safely(folder_path)
-                                empty_folders_removed += 1
-                                continue
-
-                            # Check if folder name is a valid SHA256 hash (64 hex characters)
-                            if len(folder_name) != 64 or not all(c in '0123456789abcdefABCDEF' for c in folder_name):
-                                logger.debug(f"Removing invalid hash folder: {folder_name}")
-                                await cls._remove_folder_safely(folder_path)
-                                invalid_hash_folders_removed += 1
-                                continue
-
-                            # Check if hash exists in any of the scanners
-                            hash_exists = (
-                                lora_scanner.has_hash(folder_name) or
-                                checkpoint_scanner.has_hash(folder_name) or
-                                embedding_scanner.has_hash(folder_name)
-                            )
-
-                            if not hash_exists:
-                                logger.debug(f"Removing example images folder for deleted model: {folder_name}")
-                                await cls._remove_folder_safely(folder_path)
-                                invalid_hash_folders_removed += 1
-                                continue
-
-                        except Exception as e:
-                            logger.error(f"Error processing example images folder {folder_name}: {e}")
-
-                        # Yield control periodically
-                        await asyncio.sleep(0.01)
-
-            except Exception as e:
-                logger.error(f"Error scanning example images directory: {e}")
-                return
-
-            # Log final cleanup report
-            total_removed = empty_folders_removed + invalid_hash_folders_removed
-            if total_removed > 0:
-                logger.info(f"Example images cleanup completed: checked {total_folders_checked} folders, "
-                            f"removed {empty_folders_removed} empty folders and {invalid_hash_folders_removed} "
-                            f"folders for deleted/invalid models (total: {total_removed} removed)")
+            service = ExampleImagesCleanupService()
+            result = await service.cleanup_example_image_folders()
+
+            if result.get('success'):
+                logger.debug(
+                    "Manual example images cleanup completed: moved=%s",
+                    result.get('moved_total'),
+                )
+            elif result.get('partial_success'):
+                logger.warning(
+                    "Manual example images cleanup partially succeeded: moved=%s failures=%s",
+                    result.get('moved_total'),
+                    result.get('move_failures'),
+                )
             else:
-                logger.debug(f"Example images cleanup completed: checked {total_folders_checked} folders, "
-                             f"no cleanup needed")
-
-        except Exception as e:
+                logger.debug(
+                    "Manual example images cleanup skipped or failed: %s",
+                    result.get('error', 'no changes'),
+                )
+
+            return result
+
+        except Exception as e:  # pragma: no cover - defensive guard
             logger.error(f"Error during example images cleanup: {e}", exc_info=True)
+            return {
+                'success': False,
+                'error': str(e),
+                'error_code': 'unexpected_error',
+            }

-    @classmethod
-    def _is_folder_empty(cls, folder_path: str) -> bool:
-        """Check if a folder is empty
-
-        Args:
-            folder_path: Path to the folder to check
-
-        Returns:
-            bool: True if folder is empty, False otherwise
-        """
-        try:
-            with os.scandir(folder_path) as it:
-                return not any(it)
-        except Exception as e:
-            logger.debug(f"Error checking if folder is empty {folder_path}: {e}")
-            return False
-
-    @classmethod
-    async def _remove_folder_safely(cls, folder_path: str):
-        """Safely remove a folder and all its contents
-
-        Args:
-            folder_path: Path to the folder to remove
-        """
-        try:
-            import shutil
-            loop = asyncio.get_event_loop()
-            await loop.run_in_executor(None, shutil.rmtree, folder_path)
-        except Exception as e:
-            logger.warning(f"Failed to remove folder {folder_path}: {e}")

     @classmethod
     async def _cleanup(cls, app):
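The rewritten method now returns the service's result dictionary instead of logging counts directly. A small sketch of consuming that contract; the field names (`success`, `partial_success`, `moved_total`, `move_failures`, `error`) are taken from the diff, anything else here is an assumption:

```python
def summarize_cleanup(result: dict) -> str:
    """Render an ExampleImagesCleanupService result as a one-line summary."""
    if result.get('success'):
        return f"moved {result.get('moved_total', 0)} folder(s)"
    if result.get('partial_success'):
        return (f"moved {result.get('moved_total', 0)} folder(s), "
                f"{result.get('move_failures', 0)} failure(s)")
    return f"skipped or failed: {result.get('error', 'no changes')}"

print(summarize_cleanup({'success': True, 'moved_total': 3}))
print(summarize_cleanup({'partial_success': True, 'moved_total': 2, 'move_failures': 1}))
print(summarize_cleanup({'success': False, 'error': 'path not configured'}))
```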
@@ -470,11 +301,5 @@ class LoraManager:
         try:
             logger.info("LoRA Manager: Cleaning up services")
-
-            # Close CivitaiClient gracefully
-            civitai_client = await ServiceRegistry.get_service("civitai_client")
-            if civitai_client:
-                await civitai_client.close()
-                logger.info("Closed CivitaiClient connection")
-
         except Exception as e:
             logger.error(f"Error during cleanup: {e}", exc_info=True)
@@ -1,9 +1,7 @@
 import os
-import importlib
-import sys

 # Check if running in standalone mode
-standalone_mode = 'nodes' not in sys.modules
+standalone_mode = os.environ.get("LORA_MANAGER_STANDALONE", "0") == "1"

 if not standalone_mode:
     from .metadata_hook import MetadataHook
@@ -1,9 +1,9 @@
 import json
-import sys
+import os
 from .constants import IMAGES

 # Check if running in standalone mode
-standalone_mode = 'nodes' not in sys.modules
+standalone_mode = os.environ.get("LORA_MANAGER_STANDALONE", "0") == "1"

 from .constants import MODELS, PROMPTS, SAMPLING, LORAS, SIZE, IS_SAMPLER

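With these two hunks, standalone detection switches from probing `sys.modules` for ComfyUI's `nodes` module to an explicit environment variable. A minimal sketch of toggling it (the variable name comes from the diff; setting it before import is the assumption here):

```python
import os

os.environ['LORA_MANAGER_STANDALONE'] = '1'   # enable standalone mode
standalone_mode = os.environ.get('LORA_MANAGER_STANDALONE', '0') == '1'
assert standalone_mode

# Unset or set to anything other than '1', the manager behaves as a
# ComfyUI extension and imports the metadata hook.
```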
@@ -115,7 +115,7 @@ class LoraManagerLoader:
         formatted_loras = []
         for item in loaded_loras:
             parts = item.split(":")
-            lora_name = parts[0].strip()
+            lora_name = parts[0]
             strength_parts = parts[1].strip().split(",")

             if len(strength_parts) > 1:
@@ -165,7 +165,7 @@ class LoraManagerTextLoader:

         loras = []
         for match in matches:
-            lora_name = match[0].strip()
+            lora_name = match[0]
             model_strength = float(match[1])
             clip_strength = float(match[2]) if match[2] else model_strength

@@ -55,7 +55,7 @@ class RecipeMetadataParser(ABC):
             # Unpack the tuple to get the actual data
             civitai_info, error_msg = civitai_info_tuple if isinstance(civitai_info_tuple, tuple) else (civitai_info_tuple, None)

-            if not civitai_info or civitai_info.get("error") == "Model not found":
+            if not civitai_info or error_msg == "Model not found":
                 # Model not found or deleted
                 lora_entry['isDeleted'] = True
                 lora_entry['thumbnailUrl'] = '/loras_static/images/no-preview.png'
@@ -91,7 +91,7 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):
             result["base_model"] = metadata["baseModel"]
         elif "Model hash" in metadata and metadata_provider:
             model_hash = metadata["Model hash"]
-            model_info = await metadata_provider.get_model_by_hash(model_hash)
+            model_info, error = await metadata_provider.get_model_by_hash(model_hash)
             if model_info:
                 result["base_model"] = model_info.get("baseModel", "")
         elif "Model" in metadata and isinstance(metadata.get("resources"), list):
@@ -100,7 +100,7 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):
                 if resource.get("type") == "model" and resource.get("name") == metadata.get("Model"):
                     # This is likely the checkpoint model
                     if metadata_provider and resource.get("hash"):
-                        model_info = await metadata_provider.get_model_by_hash(resource.get("hash"))
+                        model_info, error = await metadata_provider.get_model_by_hash(resource.get("hash"))
                         if model_info:
                             result["base_model"] = model_info.get("baseModel", "")

@@ -201,11 +201,7 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):
                 if version_id and metadata_provider:
                     try:
                         # Use get_model_version_info instead of get_model_version
-                        civitai_info, error = await metadata_provider.get_model_version_info(version_id)
-
-                        if error:
-                            logger.warning(f"Error getting model version info: {error}")
-                            continue
-
+                        civitai_info = await metadata_provider.get_model_version_info(version_id)
                         populated_entry = await self.populate_lora_from_civitai(
                             lora_entry,
@@ -267,31 +263,80 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):
                 if version_id and metadata_provider:
                     try:
                         # Use get_model_version_info with the version ID
-                        civitai_info, error = await metadata_provider.get_model_version_info(version_id)
-
-                        if error:
-                            logger.warning(f"Error getting model version info: {error}")
-                        else:
-                            populated_entry = await self.populate_lora_from_civitai(
-                                lora_entry,
-                                civitai_info,
-                                recipe_scanner,
-                                base_model_counts
-                            )
-
-                            if populated_entry is None:
-                                continue  # Skip invalid LoRA types
-
-                            lora_entry = populated_entry
-
-                            # Track this LoRA for deduplication
-                            if version_id:
-                                added_loras[version_id] = len(result["loras"])
+                        civitai_info = await metadata_provider.get_model_version_info(version_id)
+
+                        populated_entry = await self.populate_lora_from_civitai(
+                            lora_entry,
+                            civitai_info,
+                            recipe_scanner,
+                            base_model_counts
+                        )
+
+                        if populated_entry is None:
+                            continue  # Skip invalid LoRA types
+
+                        lora_entry = populated_entry
+
+                        # Track this LoRA for deduplication
+                        if version_id:
+                            added_loras[version_id] = len(result["loras"])
                     except Exception as e:
                         logger.error(f"Error fetching Civitai info for model ID {version_id}: {e}")

                 result["loras"].append(lora_entry)

+            # If we found LoRA hashes in the metadata but haven't already
+            # populated entries for them, fall back to creating LoRAs from
+            # the hashes section. Some Civitai image responses only include
+            # LoRA information here without explicit resources entries.
+            for lora_name, lora_hash in lora_hashes.items():
+                if not lora_hash:
+                    continue
+
+                # Skip LoRAs we've already added via resources or other fields
+                if lora_hash in added_loras:
+                    continue
+
+                lora_entry = {
+                    'name': lora_name,
+                    'type': "lora",
+                    'weight': 1.0,
+                    'hash': lora_hash,
+                    'existsLocally': False,
+                    'localPath': None,
+                    'file_name': lora_name,
+                    'thumbnailUrl': '/loras_static/images/no-preview.png',
+                    'baseModel': '',
+                    'size': 0,
+                    'downloadUrl': '',
+                    'isDeleted': False
+                }
+
+                if metadata_provider:
+                    try:
+                        civitai_info = await metadata_provider.get_model_by_hash(lora_hash)
+
+                        populated_entry = await self.populate_lora_from_civitai(
+                            lora_entry,
+                            civitai_info,
+                            recipe_scanner,
+                            base_model_counts,
+                            lora_hash
+                        )
+
+                        if populated_entry is None:
+                            continue
+
+                        lora_entry = populated_entry
+
+                        if 'id' in lora_entry and lora_entry['id']:
+                            added_loras[str(lora_entry['id'])] = len(result["loras"])
+                    except Exception as e:
+                        logger.error(f"Error fetching Civitai info for LoRA hash {lora_hash}: {e}")
+
+                added_loras[lora_hash] = len(result["loras"])
+                result["loras"].append(lora_entry)
+
         # Check for LoRA info in the format "Lora_0 Model hash", "Lora_0 Model name", etc.
         lora_index = 0
         while f"Lora_{lora_index} Model hash" in metadata and f"Lora_{lora_index} Model name" in metadata:
(File diff suppressed because it is too large.)

py/routes/base_recipe_routes.py (new file, 217 lines)
@@ -0,0 +1,217 @@
+"""Base infrastructure shared across recipe routes."""
+from __future__ import annotations
+
+import logging
+import os
+from typing import Callable, Mapping
+
+import jinja2
+from aiohttp import web
+
+from ..config import config
+from ..recipes import RecipeParserFactory
+from ..services.downloader import get_downloader
+from ..services.recipes import (
+    RecipeAnalysisService,
+    RecipePersistenceService,
+    RecipeSharingService,
+)
+from ..services.server_i18n import server_i18n
+from ..services.service_registry import ServiceRegistry
+from ..services.settings_manager import settings
+from ..utils.constants import CARD_PREVIEW_WIDTH
+from ..utils.exif_utils import ExifUtils
+from .handlers.recipe_handlers import (
+    RecipeAnalysisHandler,
+    RecipeHandlerSet,
+    RecipeListingHandler,
+    RecipeManagementHandler,
+    RecipePageView,
+    RecipeQueryHandler,
+    RecipeSharingHandler,
+)
+from .recipe_route_registrar import ROUTE_DEFINITIONS
+
+logger = logging.getLogger(__name__)
+
+
+class BaseRecipeRoutes:
+    """Common dependency and startup wiring for recipe routes."""
+
+    _HANDLER_NAMES: tuple[str, ...] = tuple(
+        definition.handler_name for definition in ROUTE_DEFINITIONS
+    )
+
+    template_name: str = "recipes.html"
+
+    def __init__(self) -> None:
+        self.recipe_scanner = None
+        self.lora_scanner = None
+        self.civitai_client = None
+        self.settings = settings
+        self.server_i18n = server_i18n
+        self.template_env = jinja2.Environment(
+            loader=jinja2.FileSystemLoader(config.templates_path),
+            autoescape=True,
+        )
+
+        self._i18n_registered = False
+        self._startup_hooks_registered = False
+        self._handler_set: RecipeHandlerSet | None = None
+        self._handler_mapping: dict[str, Callable] | None = None
+
+    async def attach_dependencies(self, app: web.Application | None = None) -> None:
+        """Resolve shared services from the registry."""
+
+        await self._ensure_services()
+        self._ensure_i18n_filter()
+
+    async def ensure_dependencies_ready(self) -> None:
+        """Ensure dependencies are available for request handlers."""
+
+        if self.recipe_scanner is None or self.civitai_client is None:
+            await self.attach_dependencies()
+
+    def register_startup_hooks(self, app: web.Application) -> None:
+        """Register startup hooks once for dependency wiring."""
+
+        if self._startup_hooks_registered:
+            return
+
+        app.on_startup.append(self.attach_dependencies)
+        app.on_startup.append(self.prewarm_cache)
+        self._startup_hooks_registered = True
+
+    async def prewarm_cache(self, app: web.Application | None = None) -> None:
+        """Pre-load recipe and LoRA caches on startup."""
+
+        try:
+            await self.attach_dependencies(app)
+
+            if self.lora_scanner is not None:
+                await self.lora_scanner.get_cached_data()
+                hash_index = getattr(self.lora_scanner, "_hash_index", None)
+                if hash_index is not None and hasattr(hash_index, "_hash_to_path"):
+                    _ = len(hash_index._hash_to_path)
+
+            if self.recipe_scanner is not None:
+                await self.recipe_scanner.get_cached_data(force_refresh=True)
+        except Exception as exc:
+            logger.error("Error pre-warming recipe cache: %s", exc, exc_info=True)
+
+    def to_route_mapping(self) -> Mapping[str, Callable]:
+        """Return a mapping of handler name to coroutine for registrar binding."""
+
+        if self._handler_mapping is None:
+            handler_set = self._create_handler_set()
+            self._handler_set = handler_set
+            self._handler_mapping = handler_set.to_route_mapping()
+        return self._handler_mapping
+
+    # Internal helpers -------------------------------------------------
+
+    async def _ensure_services(self) -> None:
+        if self.recipe_scanner is None:
+            self.recipe_scanner = await ServiceRegistry.get_recipe_scanner()
+            self.lora_scanner = getattr(self.recipe_scanner, "_lora_scanner", None)
+
+        if self.civitai_client is None:
+            self.civitai_client = await ServiceRegistry.get_civitai_client()
+
+    def _ensure_i18n_filter(self) -> None:
+        if not self._i18n_registered:
+            self.template_env.filters["t"] = self.server_i18n.create_template_filter()
+            self._i18n_registered = True
+
+    def get_handler_owner(self):
+        """Return the object supplying bound handler coroutines."""
+
+        if self._handler_set is None:
+            self._handler_set = self._create_handler_set()
+        return self._handler_set
+
+    def _create_handler_set(self) -> RecipeHandlerSet:
+        recipe_scanner_getter = lambda: self.recipe_scanner
+        civitai_client_getter = lambda: self.civitai_client
+
+        standalone_mode = os.environ.get("LORA_MANAGER_STANDALONE", "0") == "1"
+        if not standalone_mode:
+            from ..metadata_collector import get_metadata  # type: ignore[import-not-found]
+            from ..metadata_collector.metadata_processor import (  # type: ignore[import-not-found]
+                MetadataProcessor,
+            )
+            from ..metadata_collector.metadata_registry import (  # type: ignore[import-not-found]
+                MetadataRegistry,
+            )
+        else:  # pragma: no cover - optional dependency path
+            get_metadata = None  # type: ignore[assignment]
+            MetadataProcessor = None  # type: ignore[assignment]
+            MetadataRegistry = None  # type: ignore[assignment]
+
+        analysis_service = RecipeAnalysisService(
+            exif_utils=ExifUtils,
+            recipe_parser_factory=RecipeParserFactory,
+            downloader_factory=get_downloader,
+            metadata_collector=get_metadata,
+            metadata_processor_cls=MetadataProcessor,
+            metadata_registry_cls=MetadataRegistry,
+            standalone_mode=standalone_mode,
+            logger=logger,
+        )
+        persistence_service = RecipePersistenceService(
+            exif_utils=ExifUtils,
+            card_preview_width=CARD_PREVIEW_WIDTH,
+            logger=logger,
+        )
+        sharing_service = RecipeSharingService(logger=logger)
+
+        page_view = RecipePageView(
+            ensure_dependencies_ready=self.ensure_dependencies_ready,
+            settings_service=self.settings,
+            server_i18n=self.server_i18n,
+            template_env=self.template_env,
+            template_name=self.template_name,
+            recipe_scanner_getter=recipe_scanner_getter,
+            logger=logger,
+        )
+        listing = RecipeListingHandler(
+            ensure_dependencies_ready=self.ensure_dependencies_ready,
+            recipe_scanner_getter=recipe_scanner_getter,
+            logger=logger,
+        )
+        query = RecipeQueryHandler(
+            ensure_dependencies_ready=self.ensure_dependencies_ready,
+            recipe_scanner_getter=recipe_scanner_getter,
+            format_recipe_file_url=listing.format_recipe_file_url,
+            logger=logger,
+        )
+        management = RecipeManagementHandler(
+            ensure_dependencies_ready=self.ensure_dependencies_ready,
+            recipe_scanner_getter=recipe_scanner_getter,
+            logger=logger,
+            persistence_service=persistence_service,
+            analysis_service=analysis_service,
+        )
+        analysis = RecipeAnalysisHandler(
+            ensure_dependencies_ready=self.ensure_dependencies_ready,
+            recipe_scanner_getter=recipe_scanner_getter,
+            civitai_client_getter=civitai_client_getter,
+            logger=logger,
+            analysis_service=analysis_service,
+        )
+        sharing = RecipeSharingHandler(
+            ensure_dependencies_ready=self.ensure_dependencies_ready,
+            recipe_scanner_getter=recipe_scanner_getter,
+            logger=logger,
+            sharing_service=sharing_service,
+        )

+        return RecipeHandlerSet(
+            page_view=page_view,
+            listing=listing,
+            query=query,
+            management=management,
+            analysis=analysis,
+            sharing=sharing,
+        )
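The core pattern in this new file is the lazily built, cached handler mapping: `to_route_mapping()` constructs the handler set once on first use and returns the same mapping afterwards. A standalone sketch of that pattern (names here are illustrative, not from the repo):

```python
from typing import Callable, Dict, Optional

class LazyRoutes:
    def __init__(self) -> None:
        self._mapping: Optional[Dict[str, Callable]] = None

    async def list_items(self):
        return ['a', 'b']

    def to_route_mapping(self) -> Dict[str, Callable]:
        if self._mapping is None:  # build once, reuse on every later call
            self._mapping = {'list_items': self.list_items}
        return self._mapping

routes = LazyRoutes()
assert routes.to_route_mapping() is routes.to_route_mapping()  # cached
```

In `BaseRecipeRoutes` the same idea is paired with `register_startup_hooks`, so dependency resolution and cache prewarming run once per aiohttp application rather than per request.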
@@ -2,9 +2,9 @@ import logging
|
|||||||
from aiohttp import web
|
from aiohttp import web
|
||||||
|
|
||||||
from .base_model_routes import BaseModelRoutes
|
from .base_model_routes import BaseModelRoutes
|
||||||
|
from .model_route_registrar import ModelRouteRegistrar
|
||||||
from ..services.checkpoint_service import CheckpointService
|
from ..services.checkpoint_service import CheckpointService
|
||||||
from ..services.service_registry import ServiceRegistry
|
from ..services.service_registry import ServiceRegistry
|
||||||
from ..services.metadata_service import get_default_metadata_provider
|
|
||||||
from ..config import config
|
from ..config import config
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
@@ -14,8 +14,7 @@ class CheckpointRoutes(BaseModelRoutes):
|
|||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
"""Initialize Checkpoint routes with Checkpoint service"""
|
"""Initialize Checkpoint routes with Checkpoint service"""
|
||||||
# Service will be initialized later via setup_routes
|
super().__init__()
|
||||||
self.service = None
|
|
||||||
self.template_name = "checkpoints.html"
|
self.template_name = "checkpoints.html"
|
||||||
|
|
||||||
async def initialize_services(self):
|
async def initialize_services(self):
|
||||||
@@ -23,8 +22,8 @@ class CheckpointRoutes(BaseModelRoutes):
|
|||||||
checkpoint_scanner = await ServiceRegistry.get_checkpoint_scanner()
|
checkpoint_scanner = await ServiceRegistry.get_checkpoint_scanner()
|
||||||
self.service = CheckpointService(checkpoint_scanner)
|
self.service = CheckpointService(checkpoint_scanner)
|
||||||
|
|
||||||
# Initialize parent with the service
|
# Attach service dependencies
|
||||||
super().__init__(self.service)
|
self.attach_service(self.service)
|
||||||
|
|
||||||
def setup_routes(self, app: web.Application):
|
def setup_routes(self, app: web.Application):
|
||||||
"""Setup Checkpoint routes"""
|
"""Setup Checkpoint routes"""
|
||||||
@@ -34,17 +33,22 @@ class CheckpointRoutes(BaseModelRoutes):
|
|||||||
# Setup common routes with 'checkpoints' prefix (includes page route)
|
# Setup common routes with 'checkpoints' prefix (includes page route)
|
||||||
super().setup_routes(app, 'checkpoints')
|
super().setup_routes(app, 'checkpoints')
|
-    def setup_specific_routes(self, app: web.Application, prefix: str):
+    def setup_specific_routes(self, registrar: ModelRouteRegistrar, prefix: str):
         """Setup Checkpoint-specific routes"""
-        # Checkpoint-specific CivitAI integration
-        app.router.add_get(f'/api/{prefix}/civitai/versions/{{model_id}}', self.get_civitai_versions_checkpoint)
-
         # Checkpoint info by name
-        app.router.add_get(f'/api/{prefix}/info/{{name}}', self.get_checkpoint_info)
+        registrar.add_prefixed_route('GET', '/api/lm/{prefix}/info/{name}', prefix, self.get_checkpoint_info)
 
         # Checkpoint roots and Unet roots
-        app.router.add_get(f'/api/{prefix}/checkpoints_roots', self.get_checkpoints_roots)
-        app.router.add_get(f'/api/{prefix}/unet_roots', self.get_unet_roots)
+        registrar.add_prefixed_route('GET', '/api/lm/{prefix}/checkpoints_roots', prefix, self.get_checkpoints_roots)
+        registrar.add_prefixed_route('GET', '/api/lm/{prefix}/unet_roots', prefix, self.get_unet_roots)
 
+    def _validate_civitai_model_type(self, model_type: str) -> bool:
+        """Validate CivitAI model type for Checkpoint"""
+        return model_type.lower() == 'checkpoint'
+
+    def _get_expected_model_types(self) -> str:
+        """Get expected model types string for error messages"""
+        return "Checkpoint"
+
     async def get_checkpoint_info(self, request: web.Request) -> web.Response:
         """Get detailed information for a specific checkpoint by name"""
@@ -61,54 +65,6 @@ class CheckpointRoutes(BaseModelRoutes):
             logger.error(f"Error in get_checkpoint_info: {e}", exc_info=True)
             return web.json_response({"error": str(e)}, status=500)
 
-    async def get_civitai_versions_checkpoint(self, request: web.Request) -> web.Response:
-        """Get available versions for a Civitai checkpoint model with local availability info"""
-        try:
-            model_id = request.match_info['model_id']
-            metadata_provider = await get_default_metadata_provider()
-            response = await metadata_provider.get_model_versions(model_id)
-            if not response or not response.get('modelVersions'):
-                return web.Response(status=404, text="Model not found")
-
-            versions = response.get('modelVersions', [])
-            model_type = response.get('type', '')
-
-            # Check model type - should be Checkpoint
-            if model_type.lower() != 'checkpoint':
-                return web.json_response({
-                    'error': f"Model type mismatch. Expected Checkpoint, got {model_type}"
-                }, status=400)
-
-            # Check local availability for each version
-            for version in versions:
-                # Find the primary model file (type="Model" and primary=true) in the files list
-                model_file = next((file for file in version.get('files', [])
-                                  if file.get('type') == 'Model' and file.get('primary') == True), None)
-
-                # If no primary file found, try to find any model file
-                if not model_file:
-                    model_file = next((file for file in version.get('files', [])
-                                      if file.get('type') == 'Model'), None)
-
-                if model_file:
-                    sha256 = model_file.get('hashes', {}).get('SHA256')
-                    if sha256:
-                        # Set existsLocally and localPath at the version level
-                        version['existsLocally'] = self.service.has_hash(sha256)
-                        if version['existsLocally']:
-                            version['localPath'] = self.service.get_path_by_hash(sha256)
-
-                        # Also set the model file size at the version level for easier access
-                        version['modelSizeKB'] = model_file.get('sizeKB')
-                else:
-                    # No model file found in this version
-                    version['existsLocally'] = False
-
-            return web.json_response(versions)
-        except Exception as e:
-            logger.error(f"Error fetching checkpoint model versions: {e}")
-            return web.Response(status=500, text=str(e))
-
     async def get_checkpoints_roots(self, request: web.Request) -> web.Response:
         """Return the list of checkpoint roots from config"""
         try:
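The diff shows only the call sites, not the registrar itself, so its exact behavior is an assumption here; a minimal sketch consistent with these calls would substitute the '{prefix}' placeholder before handing the path to aiohttp, leaving '{name}' as an ordinary dynamic segment:

    from aiohttp import web

    class ModelRouteRegistrar:
        """Minimal sketch, assuming add_prefixed_route only substitutes the prefix."""

        def __init__(self, app: web.Application) -> None:
            self._app = app

        def add_prefixed_route(self, method: str, template: str, prefix: str, handler) -> None:
            # '/api/lm/{prefix}/info/{name}' with prefix='checkpoints' becomes
            # '/api/lm/checkpoints/info/{name}'; '{name}' stays a dynamic parameter.
            path = template.replace('{prefix}', prefix)
            self._app.router.add_route(method, path, handler)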
@@ -2,9 +2,9 @@ import logging
 from aiohttp import web
 
 from .base_model_routes import BaseModelRoutes
+from .model_route_registrar import ModelRouteRegistrar
 from ..services.embedding_service import EmbeddingService
 from ..services.service_registry import ServiceRegistry
-from ..services.metadata_service import get_default_metadata_provider
 
 logger = logging.getLogger(__name__)
 
@@ -13,8 +13,7 @@ class EmbeddingRoutes(BaseModelRoutes):
 
     def __init__(self):
         """Initialize Embedding routes with Embedding service"""
-        # Service will be initialized later via setup_routes
-        self.service = None
+        super().__init__()
         self.template_name = "embeddings.html"
 
     async def initialize_services(self):
@@ -22,8 +21,8 @@ class EmbeddingRoutes(BaseModelRoutes):
         embedding_scanner = await ServiceRegistry.get_embedding_scanner()
         self.service = EmbeddingService(embedding_scanner)
 
-        # Initialize parent with the service
-        super().__init__(self.service)
+        # Attach service dependencies
+        self.attach_service(self.service)
 
     def setup_routes(self, app: web.Application):
         """Setup Embedding routes"""
@@ -33,13 +32,18 @@ class EmbeddingRoutes(BaseModelRoutes):
         # Setup common routes with 'embeddings' prefix (includes page route)
         super().setup_routes(app, 'embeddings')
 
-    def setup_specific_routes(self, app: web.Application, prefix: str):
+    def setup_specific_routes(self, registrar: ModelRouteRegistrar, prefix: str):
         """Setup Embedding-specific routes"""
-        # Embedding-specific CivitAI integration
-        app.router.add_get(f'/api/{prefix}/civitai/versions/{{model_id}}', self.get_civitai_versions_embedding)
-
         # Embedding info by name
-        app.router.add_get(f'/api/{prefix}/info/{{name}}', self.get_embedding_info)
+        registrar.add_prefixed_route('GET', '/api/lm/{prefix}/info/{name}', prefix, self.get_embedding_info)
 
+    def _validate_civitai_model_type(self, model_type: str) -> bool:
+        """Validate CivitAI model type for Embedding"""
+        return model_type.lower() == 'textualinversion'
+
+    def _get_expected_model_types(self) -> str:
+        """Get expected model types string for error messages"""
+        return "TextualInversion"
+
     async def get_embedding_info(self, request: web.Request) -> web.Response:
         """Get detailed information for a specific embedding by name"""
@@ -55,51 +59,3 @@ class EmbeddingRoutes(BaseModelRoutes):
         except Exception as e:
             logger.error(f"Error in get_embedding_info: {e}", exc_info=True)
             return web.json_response({"error": str(e)}, status=500)
-
-    async def get_civitai_versions_embedding(self, request: web.Request) -> web.Response:
-        """Get available versions for a Civitai embedding model with local availability info"""
-        try:
-            model_id = request.match_info['model_id']
-            metadata_provider = await get_default_metadata_provider()
-            response = await metadata_provider.get_model_versions(model_id)
-            if not response or not response.get('modelVersions'):
-                return web.Response(status=404, text="Model not found")
-
-            versions = response.get('modelVersions', [])
-            model_type = response.get('type', '')
-
-            # Check model type - should be TextualInversion (Embedding)
-            if model_type.lower() not in ['textualinversion', 'embedding']:
-                return web.json_response({
-                    'error': f"Model type mismatch. Expected TextualInversion/Embedding, got {model_type}"
-                }, status=400)
-
-            # Check local availability for each version
-            for version in versions:
-                # Find the primary model file (type="Model" and primary=true) in the files list
-                model_file = next((file for file in version.get('files', [])
-                                  if file.get('type') == 'Model' and file.get('primary') == True), None)
-
-                # If no primary file found, try to find any model file
-                if not model_file:
-                    model_file = next((file for file in version.get('files', [])
-                                      if file.get('type') == 'Model'), None)
-
-                if model_file:
-                    sha256 = model_file.get('hashes', {}).get('SHA256')
-                    if sha256:
-                        # Set existsLocally and localPath at the version level
-                        version['existsLocally'] = self.service.has_hash(sha256)
-                        if version['existsLocally']:
-                            version['localPath'] = self.service.get_path_by_hash(sha256)
-
-                        # Also set the model file size at the version level for easier access
-                        version['modelSizeKB'] = model_file.get('sizeKB')
-                else:
-                    # No model file found in this version
-                    version['existsLocally'] = False
-
-            return web.json_response(versions)
-        except Exception as e:
-            logger.error(f"Error fetching embedding model versions: {e}")
-            return web.Response(status=500, text=str(e))
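With the duplicated get_civitai_versions_* bodies deleted, both route classes keep only the two small hooks above. How BaseModelRoutes consumes them is outside this excerpt; a plausible sketch is a single shared versions endpoint that defers only the type check to the subclass (the _metadata_provider attribute is hypothetical):

    from aiohttp import web

    async def get_civitai_versions(self, request: web.Request) -> web.Response:
        # Sketch of a shared base-class endpoint: fetch the versions once...
        response = await self._metadata_provider.get_model_versions(request.match_info['model_id'])
        model_type = (response or {}).get('type', '')
        # ...then let the subclass hook accept or reject the model type.
        if not self._validate_civitai_model_type(model_type):
            return web.json_response(
                {'error': f"Model type mismatch. Expected {self._get_expected_model_types()}, got {model_type}"},
                status=400,
            )
        return web.json_response((response or {}).get('modelVersions', []))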
py/routes/example_images_route_registrar.py (new file, 62 lines)
@@ -0,0 +1,62 @@
+"""Route registrar for example image endpoints."""
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Callable, Iterable, Mapping
+
+from aiohttp import web
+
+
+@dataclass(frozen=True)
+class RouteDefinition:
+    """Declarative configuration for a HTTP route."""
+
+    method: str
+    path: str
+    handler_name: str
+
+
+ROUTE_DEFINITIONS: tuple[RouteDefinition, ...] = (
+    RouteDefinition("POST", "/api/lm/download-example-images", "download_example_images"),
+    RouteDefinition("POST", "/api/lm/import-example-images", "import_example_images"),
+    RouteDefinition("GET", "/api/lm/example-images-status", "get_example_images_status"),
+    RouteDefinition("POST", "/api/lm/pause-example-images", "pause_example_images"),
+    RouteDefinition("POST", "/api/lm/resume-example-images", "resume_example_images"),
+    RouteDefinition("POST", "/api/lm/open-example-images-folder", "open_example_images_folder"),
+    RouteDefinition("GET", "/api/lm/example-image-files", "get_example_image_files"),
+    RouteDefinition("GET", "/api/lm/has-example-images", "has_example_images"),
+    RouteDefinition("POST", "/api/lm/delete-example-image", "delete_example_image"),
+    RouteDefinition("POST", "/api/lm/force-download-example-images", "force_download_example_images"),
+    RouteDefinition("POST", "/api/lm/cleanup-example-image-folders", "cleanup_example_image_folders"),
+)
+
+
+class ExampleImagesRouteRegistrar:
+    """Bind declarative example image routes to an aiohttp router."""
+
+    _METHOD_MAP = {
+        "GET": "add_get",
+        "POST": "add_post",
+        "PUT": "add_put",
+        "DELETE": "add_delete",
+    }
+
+    def __init__(self, app: web.Application) -> None:
+        self._app = app
+
+    def register_routes(
+        self,
+        handler_lookup: Mapping[str, Callable[[web.Request], object]],
+        *,
+        definitions: Iterable[RouteDefinition] = ROUTE_DEFINITIONS,
+    ) -> None:
+        """Register each route definition using the supplied handlers."""
+        for definition in definitions:
+            handler = handler_lookup[definition.handler_name]
+            self._bind_route(definition.method, definition.path, handler)
+
+    def _bind_route(self, method: str, path: str, handler: Callable[[web.Request], object]) -> None:
+        add_method_name = self._METHOD_MAP[method.upper()]
+        add_method = getattr(self._app.router, add_method_name)
+        add_method(path, handler)
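Because register_routes accepts an explicit definitions iterable, the registrar can be exercised in isolation with any handler mapping. A small usage sketch; the ping handler is illustrative only, and the import path assumes the repository's py package is importable:

    from aiohttp import web
    from py.routes.example_images_route_registrar import (  # import path assumed
        ExampleImagesRouteRegistrar,
        RouteDefinition,
    )

    async def ping(request: web.Request) -> web.Response:  # hypothetical handler
        return web.json_response({"ok": True})

    app = web.Application()
    registrar = ExampleImagesRouteRegistrar(app)
    # handler_lookup maps each definition's handler_name to a callable.
    registrar.register_routes(
        {"ping": ping},
        definitions=(RouteDefinition("GET", "/api/lm/ping", "ping"),),
    )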
@@ -1,74 +1,88 @@
+from __future__ import annotations
+
 import logging
-from ..utils.example_images_download_manager import DownloadManager
-from ..utils.example_images_processor import ExampleImagesProcessor
+from typing import Callable, Mapping
+
+from aiohttp import web
+
+from .example_images_route_registrar import ExampleImagesRouteRegistrar
+from .handlers.example_images_handlers import (
+    ExampleImagesDownloadHandler,
+    ExampleImagesFileHandler,
+    ExampleImagesHandlerSet,
+    ExampleImagesManagementHandler,
+)
+from ..services.use_cases.example_images import (
+    DownloadExampleImagesUseCase,
+    ImportExampleImagesUseCase,
+)
+from ..utils.example_images_download_manager import (
+    DownloadManager,
+    get_default_download_manager,
+)
 from ..utils.example_images_file_manager import ExampleImagesFileManager
-from ..services.websocket_manager import ws_manager
+from ..utils.example_images_processor import ExampleImagesProcessor
+from ..services.example_images_cleanup_service import ExampleImagesCleanupService
 
 logger = logging.getLogger(__name__)
 
 
 class ExampleImagesRoutes:
-    """Routes for example images related functionality"""
+    """Route controller for example image endpoints."""
 
-    @staticmethod
-    def setup_routes(app):
-        """Register example images routes"""
-        app.router.add_post('/api/download-example-images', ExampleImagesRoutes.download_example_images)
-        app.router.add_post('/api/import-example-images', ExampleImagesRoutes.import_example_images)
-        app.router.add_get('/api/example-images-status', ExampleImagesRoutes.get_example_images_status)
-        app.router.add_post('/api/pause-example-images', ExampleImagesRoutes.pause_example_images)
-        app.router.add_post('/api/resume-example-images', ExampleImagesRoutes.resume_example_images)
-        app.router.add_post('/api/open-example-images-folder', ExampleImagesRoutes.open_example_images_folder)
-        app.router.add_get('/api/example-image-files', ExampleImagesRoutes.get_example_image_files)
-        app.router.add_get('/api/has-example-images', ExampleImagesRoutes.has_example_images)
-        app.router.add_post('/api/delete-example-image', ExampleImagesRoutes.delete_example_image)
-        app.router.add_post('/api/force-download-example-images', ExampleImagesRoutes.force_download_example_images)
-
-    @staticmethod
-    async def download_example_images(request):
-        """Download example images for models from Civitai"""
-        return await DownloadManager.start_download(request)
-
-    @staticmethod
-    async def get_example_images_status(request):
-        """Get the current status of example images download"""
-        return await DownloadManager.get_status(request)
-
-    @staticmethod
-    async def pause_example_images(request):
-        """Pause the example images download"""
-        return await DownloadManager.pause_download(request)
-
-    @staticmethod
-    async def resume_example_images(request):
-        """Resume the example images download"""
-        return await DownloadManager.resume_download(request)
-
-    @staticmethod
-    async def open_example_images_folder(request):
-        """Open the example images folder for a specific model"""
-        return await ExampleImagesFileManager.open_folder(request)
-
-    @staticmethod
-    async def get_example_image_files(request):
-        """Get list of example image files for a specific model"""
-        return await ExampleImagesFileManager.get_files(request)
-
-    @staticmethod
-    async def import_example_images(request):
-        """Import local example images for a model"""
-        return await ExampleImagesProcessor.import_images(request)
-
-    @staticmethod
-    async def has_example_images(request):
-        """Check if example images folder exists and is not empty for a model"""
-        return await ExampleImagesFileManager.has_images(request)
-
-    @staticmethod
-    async def delete_example_image(request):
-        """Delete a custom example image for a model"""
-        return await ExampleImagesProcessor.delete_custom_image(request)
-
-    @staticmethod
-    async def force_download_example_images(request):
-        """Force download example images for specific models"""
-        return await DownloadManager.start_force_download(request)
+    def __init__(
+        self,
+        *,
+        ws_manager,
+        download_manager: DownloadManager | None = None,
+        processor=ExampleImagesProcessor,
+        file_manager=ExampleImagesFileManager,
+        cleanup_service: ExampleImagesCleanupService | None = None,
+    ) -> None:
+        if ws_manager is None:
+            raise ValueError("ws_manager is required")
+        self._download_manager = download_manager or get_default_download_manager(ws_manager)
+        self._processor = processor
+        self._file_manager = file_manager
+        self._cleanup_service = cleanup_service or ExampleImagesCleanupService()
+        self._handler_set: ExampleImagesHandlerSet | None = None
+        self._handler_mapping: Mapping[str, Callable[[web.Request], web.StreamResponse]] | None = None
+
+    @classmethod
+    def setup_routes(cls, app: web.Application, *, ws_manager) -> None:
+        """Register routes on the given aiohttp application using default wiring."""
+        controller = cls(ws_manager=ws_manager)
+        controller.register(app)
+
+    def register(self, app: web.Application) -> None:
+        """Bind the controller's handlers to the aiohttp router."""
+        registrar = ExampleImagesRouteRegistrar(app)
+        registrar.register_routes(self.to_route_mapping())
+
+    def to_route_mapping(self) -> Mapping[str, Callable[[web.Request], web.StreamResponse]]:
+        """Return the registrar-compatible mapping of handler names to callables."""
+        if self._handler_mapping is None:
+            handler_set = self._build_handler_set()
+            self._handler_set = handler_set
+            self._handler_mapping = handler_set.to_route_mapping()
+        return self._handler_mapping
+
+    def _build_handler_set(self) -> ExampleImagesHandlerSet:
+        logger.debug("Building ExampleImagesHandlerSet with %s, %s, %s", self._download_manager, self._processor, self._file_manager)
+        download_use_case = DownloadExampleImagesUseCase(download_manager=self._download_manager)
+        download_handler = ExampleImagesDownloadHandler(download_use_case, self._download_manager)
+        import_use_case = ImportExampleImagesUseCase(processor=self._processor)
+        management_handler = ExampleImagesManagementHandler(
+            import_use_case,
+            self._processor,
+            self._cleanup_service,
+        )
+        file_handler = ExampleImagesFileHandler(self._file_manager)
+        return ExampleImagesHandlerSet(
+            download=download_handler,
+            management=management_handler,
+            files=file_handler,
+        )
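The classmethod preserves the old one-line call site while the keyword-only constructor opens the controller to dependency injection. A brief sketch of both styles, assuming ExampleImagesRoutes is importable; FakeProcessor is a hypothetical stand-in:

    from aiohttp import web

    def wire_example_image_routes(app: web.Application, ws_manager) -> None:
        # Default wiring, exactly what the classmethod in the diff performs:
        ExampleImagesRoutes.setup_routes(app, ws_manager=ws_manager)

    class FakeProcessor:  # hypothetical stand-in for ExampleImagesProcessor
        @staticmethod
        async def import_images(request):
            return {"success": True}

    def wire_with_test_doubles(app: web.Application, ws_manager) -> None:
        # Inject the double via the keyword-only constructor, then bind manually.
        controller = ExampleImagesRoutes(ws_manager=ws_manager, processor=FakeProcessor)
        controller.register(app)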
py/routes/handlers/example_images_handlers.py (new file, 159 lines)
@@ -0,0 +1,159 @@
+"""Handler set for example image routes."""
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Callable, Mapping
+
+from aiohttp import web
+
+from ...services.use_cases.example_images import (
+    DownloadExampleImagesConfigurationError,
+    DownloadExampleImagesInProgressError,
+    DownloadExampleImagesUseCase,
+    ImportExampleImagesUseCase,
+    ImportExampleImagesValidationError,
+)
+from ...utils.example_images_download_manager import (
+    DownloadConfigurationError,
+    DownloadInProgressError,
+    DownloadNotRunningError,
+    ExampleImagesDownloadError,
+)
+from ...utils.example_images_processor import ExampleImagesImportError
+
+
+class ExampleImagesDownloadHandler:
+    """HTTP adapters for download-related example image endpoints."""
+
+    def __init__(
+        self,
+        download_use_case: DownloadExampleImagesUseCase,
+        download_manager,
+    ) -> None:
+        self._download_use_case = download_use_case
+        self._download_manager = download_manager
+
+    async def download_example_images(self, request: web.Request) -> web.StreamResponse:
+        try:
+            payload = await request.json()
+            result = await self._download_use_case.execute(payload)
+            return web.json_response(result)
+        except DownloadExampleImagesInProgressError as exc:
+            response = {
+                'success': False,
+                'error': str(exc),
+                'status': exc.progress,
+            }
+            return web.json_response(response, status=400)
+        except DownloadExampleImagesConfigurationError as exc:
+            return web.json_response({'success': False, 'error': str(exc)}, status=400)
+        except ExampleImagesDownloadError as exc:
+            return web.json_response({'success': False, 'error': str(exc)}, status=500)
+
+    async def get_example_images_status(self, request: web.Request) -> web.StreamResponse:
+        result = await self._download_manager.get_status(request)
+        return web.json_response(result)
+
+    async def pause_example_images(self, request: web.Request) -> web.StreamResponse:
+        try:
+            result = await self._download_manager.pause_download(request)
+            return web.json_response(result)
+        except DownloadNotRunningError as exc:
+            return web.json_response({'success': False, 'error': str(exc)}, status=400)
+
+    async def resume_example_images(self, request: web.Request) -> web.StreamResponse:
+        try:
+            result = await self._download_manager.resume_download(request)
+            return web.json_response(result)
+        except DownloadNotRunningError as exc:
+            return web.json_response({'success': False, 'error': str(exc)}, status=400)
+
+    async def force_download_example_images(self, request: web.Request) -> web.StreamResponse:
+        try:
+            payload = await request.json()
+            result = await self._download_manager.start_force_download(payload)
+            return web.json_response(result)
+        except DownloadInProgressError as exc:
+            response = {
+                'success': False,
+                'error': str(exc),
+                'status': exc.progress_snapshot,
+            }
+            return web.json_response(response, status=400)
+        except DownloadConfigurationError as exc:
+            return web.json_response({'success': False, 'error': str(exc)}, status=400)
+        except ExampleImagesDownloadError as exc:
+            return web.json_response({'success': False, 'error': str(exc)}, status=500)
+
+
+class ExampleImagesManagementHandler:
+    """HTTP adapters for import/delete endpoints."""
+
+    def __init__(self, import_use_case: ImportExampleImagesUseCase, processor, cleanup_service) -> None:
+        self._import_use_case = import_use_case
+        self._processor = processor
+        self._cleanup_service = cleanup_service
+
+    async def import_example_images(self, request: web.Request) -> web.StreamResponse:
+        try:
+            result = await self._import_use_case.execute(request)
+            return web.json_response(result)
+        except ImportExampleImagesValidationError as exc:
+            return web.json_response({'success': False, 'error': str(exc)}, status=400)
+        except ExampleImagesImportError as exc:
+            return web.json_response({'success': False, 'error': str(exc)}, status=500)
+
+    async def delete_example_image(self, request: web.Request) -> web.StreamResponse:
+        return await self._processor.delete_custom_image(request)
+
+    async def cleanup_example_image_folders(self, request: web.Request) -> web.StreamResponse:
+        result = await self._cleanup_service.cleanup_example_image_folders()
+
+        if result.get('success') or result.get('partial_success'):
+            return web.json_response(result, status=200)
+
+        error_code = result.get('error_code')
+        status = 400 if error_code in {'path_not_configured', 'path_not_found'} else 500
+        return web.json_response(result, status=status)
+
+
+class ExampleImagesFileHandler:
+    """HTTP adapters for filesystem-centric endpoints."""
+
+    def __init__(self, file_manager) -> None:
+        self._file_manager = file_manager
+
+    async def open_example_images_folder(self, request: web.Request) -> web.StreamResponse:
+        return await self._file_manager.open_folder(request)
+
+    async def get_example_image_files(self, request: web.Request) -> web.StreamResponse:
+        return await self._file_manager.get_files(request)
+
+    async def has_example_images(self, request: web.Request) -> web.StreamResponse:
+        return await self._file_manager.has_images(request)
+
+
+@dataclass(frozen=True)
+class ExampleImagesHandlerSet:
+    """Aggregate of handlers exposed to the registrar."""
+
+    download: ExampleImagesDownloadHandler
+    management: ExampleImagesManagementHandler
+    files: ExampleImagesFileHandler
+
+    def to_route_mapping(self) -> Mapping[str, Callable[[web.Request], web.StreamResponse]]:
+        """Flatten handler methods into the registrar mapping."""
+        return {
+            "download_example_images": self.download.download_example_images,
+            "get_example_images_status": self.download.get_example_images_status,
+            "pause_example_images": self.download.pause_example_images,
+            "resume_example_images": self.download.resume_example_images,
+            "force_download_example_images": self.download.force_download_example_images,
+            "import_example_images": self.management.import_example_images,
+            "delete_example_image": self.management.delete_example_image,
+            "cleanup_example_image_folders": self.management.cleanup_example_image_folders,
+            "open_example_images_folder": self.files.open_example_images_folder,
+            "get_example_image_files": self.files.get_example_image_files,
+            "has_example_images": self.files.has_example_images,
+        }
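One invariant worth noting: register_routes looks handlers up by name, so every handler_name in ROUTE_DEFINITIONS must be a key of the mapping returned by to_route_mapping, or registration raises KeyError at startup. A minimal sketch of checking that invariant, assuming handler_set is an already-built ExampleImagesHandlerSet:

    # Every declarative route must resolve to a handler in the flattened mapping.
    handler_names = {definition.handler_name for definition in ROUTE_DEFINITIONS}
    mapping = handler_set.to_route_mapping()
    missing = handler_names - set(mapping)
    assert not missing, f"route definitions without handlers: {missing}"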
795
py/routes/handlers/misc_handlers.py
Normal file
795
py/routes/handlers/misc_handlers.py
Normal file
@@ -0,0 +1,795 @@
|
|||||||
|
"""Handlers for miscellaneous routes.
|
||||||
|
|
||||||
|
The legacy :mod:`py.routes.misc_routes` module bundled HTTP wiring and
|
||||||
|
business logic in a single class. This module mirrors the model route
|
||||||
|
architecture by splitting the responsibilities into dedicated handler
|
||||||
|
objects that can be composed by the route controller.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Awaitable, Callable, Dict, Mapping, Protocol
|
||||||
|
|
||||||
|
from aiohttp import web
|
||||||
|
|
||||||
|
from ...config import config
|
||||||
|
from ...services.metadata_service import (
|
||||||
|
get_metadata_archive_manager,
|
||||||
|
update_metadata_providers,
|
||||||
|
)
|
||||||
|
from ...services.service_registry import ServiceRegistry
|
||||||
|
from ...services.settings_manager import settings as default_settings
|
||||||
|
from ...services.websocket_manager import ws_manager
|
||||||
|
from ...services.downloader import get_downloader
|
||||||
|
from ...utils.constants import DEFAULT_NODE_COLOR, NODE_TYPES, SUPPORTED_MEDIA_EXTENSIONS
|
||||||
|
from ...utils.example_images_paths import is_valid_example_images_root
|
||||||
|
from ...utils.lora_metadata import extract_trained_words
|
||||||
|
from ...utils.usage_stats import UsageStats
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class PromptServerProtocol(Protocol):
|
||||||
|
"""Subset of PromptServer used by the handlers."""
|
||||||
|
|
||||||
|
instance: "PromptServerProtocol"
|
||||||
|
|
||||||
|
def send_sync(self, event: str, payload: dict) -> None: # pragma: no cover - protocol
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
class DownloaderProtocol(Protocol):
|
||||||
|
async def refresh_session(self) -> None: # pragma: no cover - protocol
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
class UsageStatsFactory(Protocol):
|
||||||
|
def __call__(self) -> UsageStats: # pragma: no cover - protocol
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
class MetadataProviderProtocol(Protocol):
|
||||||
|
async def get_model_versions(self, model_id: int) -> dict | None: # pragma: no cover - protocol
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
class MetadataArchiveManagerProtocol(Protocol):
|
||||||
|
async def download_and_extract_database(
|
||||||
|
self, progress_callback: Callable[[str, str], None]
|
||||||
|
) -> bool: # pragma: no cover - protocol
|
||||||
|
...
|
||||||
|
|
||||||
|
async def remove_database(self) -> bool: # pragma: no cover - protocol
|
||||||
|
...
|
||||||
|
|
||||||
|
def is_database_available(self) -> bool: # pragma: no cover - protocol
|
||||||
|
...
|
||||||
|
|
||||||
|
def get_database_path(self) -> str | None: # pragma: no cover - protocol
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
class NodeRegistry:
|
||||||
|
"""Thread-safe registry for tracking LoRA nodes in active workflows."""
|
||||||
|
|
||||||
|
def __init__(self) -> None:
|
||||||
|
self._lock = asyncio.Lock()
|
||||||
|
self._nodes: Dict[int, dict] = {}
|
||||||
|
self._registry_updated = asyncio.Event()
|
||||||
|
|
||||||
|
async def register_nodes(self, nodes: list[dict]) -> None:
|
||||||
|
async with self._lock:
|
||||||
|
self._nodes.clear()
|
||||||
|
for node in nodes:
|
||||||
|
node_id = node["node_id"]
|
||||||
|
node_type = node.get("type", "")
|
||||||
|
type_id = NODE_TYPES.get(node_type, 0)
|
||||||
|
bgcolor = node.get("bgcolor") or DEFAULT_NODE_COLOR
|
||||||
|
self._nodes[node_id] = {
|
||||||
|
"id": node_id,
|
||||||
|
"bgcolor": bgcolor,
|
||||||
|
"title": node.get("title"),
|
||||||
|
"type": type_id,
|
||||||
|
"type_name": node_type,
|
||||||
|
}
|
||||||
|
logger.debug("Registered %s nodes in registry", len(nodes))
|
||||||
|
self._registry_updated.set()
|
||||||
|
|
||||||
|
async def get_registry(self) -> dict:
|
||||||
|
async with self._lock:
|
||||||
|
return {
|
||||||
|
"nodes": dict(self._nodes),
|
||||||
|
"node_count": len(self._nodes),
|
||||||
|
}
|
||||||
|
|
||||||
|
async def wait_for_update(self, timeout: float = 1.0) -> bool:
|
||||||
|
self._registry_updated.clear()
|
||||||
|
try:
|
||||||
|
await asyncio.wait_for(self._registry_updated.wait(), timeout=timeout)
|
||||||
|
return True
|
||||||
|
except asyncio.TimeoutError:
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
class HealthCheckHandler:
|
||||||
|
async def health_check(self, request: web.Request) -> web.Response:
|
||||||
|
return web.json_response({"status": "ok"})
|
||||||
|
|
||||||
|
|
||||||
|
class SettingsHandler:
|
||||||
|
"""Sync settings between backend and frontend."""
|
||||||
|
|
||||||
|
_SYNC_KEYS = (
|
||||||
|
"civitai_api_key",
|
||||||
|
"default_lora_root",
|
||||||
|
"default_checkpoint_root",
|
||||||
|
"default_embedding_root",
|
||||||
|
"base_model_path_mappings",
|
||||||
|
"download_path_templates",
|
||||||
|
"enable_metadata_archive_db",
|
||||||
|
"language",
|
||||||
|
"proxy_enabled",
|
||||||
|
"proxy_type",
|
||||||
|
"proxy_host",
|
||||||
|
"proxy_port",
|
||||||
|
"proxy_username",
|
||||||
|
"proxy_password",
|
||||||
|
"example_images_path",
|
||||||
|
"optimize_example_images",
|
||||||
|
"auto_download_example_images",
|
||||||
|
"blur_mature_content",
|
||||||
|
"autoplay_on_hover",
|
||||||
|
"display_density",
|
||||||
|
"card_info_display",
|
||||||
|
"include_trigger_words",
|
||||||
|
"show_only_sfw",
|
||||||
|
"compact_mode",
|
||||||
|
)
|
||||||
|
|
||||||
|
_PROXY_KEYS = {"proxy_enabled", "proxy_host", "proxy_port", "proxy_username", "proxy_password", "proxy_type"}
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
settings_service=default_settings,
|
||||||
|
metadata_provider_updater: Callable[[], Awaitable[None]] = update_metadata_providers,
|
||||||
|
downloader_factory: Callable[[], Awaitable[DownloaderProtocol]] = get_downloader,
|
||||||
|
) -> None:
|
||||||
|
self._settings = settings_service
|
||||||
|
self._metadata_provider_updater = metadata_provider_updater
|
||||||
|
self._downloader_factory = downloader_factory
|
||||||
|
|
||||||
|
async def get_libraries(self, request: web.Request) -> web.Response:
|
||||||
|
"""Return the registered libraries and the active selection."""
|
||||||
|
|
||||||
|
try:
|
||||||
|
snapshot = config.get_library_registry_snapshot()
|
||||||
|
libraries = snapshot.get("libraries", {})
|
||||||
|
active_library = snapshot.get("active_library", "")
|
||||||
|
return web.json_response(
|
||||||
|
{
|
||||||
|
"success": True,
|
||||||
|
"libraries": libraries,
|
||||||
|
"active_library": active_library,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
except Exception as exc: # pragma: no cover - defensive logging
|
||||||
|
logger.error("Error getting library registry: %s", exc, exc_info=True)
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||||
|
|
||||||
|
async def get_settings(self, request: web.Request) -> web.Response:
|
||||||
|
try:
|
||||||
|
response_data = {}
|
||||||
|
for key in self._SYNC_KEYS:
|
||||||
|
value = self._settings.get(key)
|
||||||
|
if value is not None:
|
||||||
|
response_data[key] = value
|
||||||
|
return web.json_response({"success": True, "settings": response_data})
|
||||||
|
except Exception as exc: # pragma: no cover - defensive logging
|
||||||
|
logger.error("Error getting settings: %s", exc, exc_info=True)
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||||
|
|
||||||
|
async def activate_library(self, request: web.Request) -> web.Response:
|
||||||
|
"""Activate the selected library."""
|
||||||
|
|
||||||
|
try:
|
||||||
|
data = await request.json()
|
||||||
|
except Exception as exc: # pragma: no cover - defensive logging
|
||||||
|
logger.error("Error parsing activate library request: %s", exc, exc_info=True)
|
||||||
|
return web.json_response({"success": False, "error": "Invalid JSON payload"}, status=400)
|
||||||
|
|
||||||
|
library_name = data.get("library") or data.get("library_name")
|
||||||
|
if not isinstance(library_name, str) or not library_name.strip():
|
||||||
|
return web.json_response(
|
||||||
|
{"success": False, "error": "Library name is required"}, status=400
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
normalized_name = library_name.strip()
|
||||||
|
self._settings.activate_library(normalized_name)
|
||||||
|
snapshot = config.get_library_registry_snapshot()
|
||||||
|
libraries = snapshot.get("libraries", {})
|
||||||
|
active_library = snapshot.get("active_library", "")
|
||||||
|
return web.json_response(
|
||||||
|
{
|
||||||
|
"success": True,
|
||||||
|
"active_library": active_library,
|
||||||
|
"libraries": libraries,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
except KeyError as exc:
|
||||||
|
logger.debug("Attempted to activate unknown library '%s'", library_name)
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=404)
|
||||||
|
except Exception as exc: # pragma: no cover - defensive logging
|
||||||
|
logger.error("Error activating library '%s': %s", library_name, exc, exc_info=True)
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||||
|
|
||||||
|
async def update_settings(self, request: web.Request) -> web.Response:
|
||||||
|
try:
|
||||||
|
data = await request.json()
|
||||||
|
proxy_changed = False
|
||||||
|
|
||||||
|
for key, value in data.items():
|
||||||
|
if value == self._settings.get(key):
|
||||||
|
continue
|
||||||
|
|
||||||
|
if key == "example_images_path" and value:
|
||||||
|
validation_error = self._validate_example_images_path(value)
|
||||||
|
if validation_error:
|
||||||
|
return web.json_response({"success": False, "error": validation_error})
|
||||||
|
|
||||||
|
if value == "__DELETE__" and key in ("proxy_username", "proxy_password"):
|
||||||
|
self._settings.delete(key)
|
||||||
|
else:
|
||||||
|
self._settings.set(key, value)
|
||||||
|
|
||||||
|
if key == "enable_metadata_archive_db":
|
||||||
|
await self._metadata_provider_updater()
|
||||||
|
|
||||||
|
if key in self._PROXY_KEYS:
|
||||||
|
proxy_changed = True
|
||||||
|
|
||||||
|
if proxy_changed:
|
||||||
|
downloader = await self._downloader_factory()
|
||||||
|
await downloader.refresh_session()
|
||||||
|
|
||||||
|
return web.json_response({"success": True})
|
||||||
|
except Exception as exc: # pragma: no cover - defensive logging
|
||||||
|
logger.error("Error updating settings: %s", exc, exc_info=True)
|
||||||
|
return web.Response(status=500, text=str(exc))
|
||||||
|
|
||||||
|
def _validate_example_images_path(self, folder_path: str) -> str | None:
|
||||||
|
if not os.path.exists(folder_path):
|
||||||
|
return f"Path does not exist: {folder_path}"
|
||||||
|
if not os.path.isdir(folder_path):
|
||||||
|
return "Please set a dedicated folder for example images."
|
||||||
|
if not self._is_dedicated_example_images_folder(folder_path):
|
||||||
|
return "Please set a dedicated folder for example images."
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _is_dedicated_example_images_folder(self, folder_path: str) -> bool:
|
||||||
|
return is_valid_example_images_root(folder_path)
|
||||||
|
|
||||||
|
|
||||||
|
class UsageStatsHandler:
|
||||||
|
def __init__(self, usage_stats_factory: UsageStatsFactory = UsageStats) -> None:
|
||||||
|
self._usage_stats_factory = usage_stats_factory
|
||||||
|
|
||||||
|
async def update_usage_stats(self, request: web.Request) -> web.Response:
|
||||||
|
try:
|
||||||
|
data = await request.json()
|
||||||
|
prompt_id = data.get("prompt_id")
|
||||||
|
if not prompt_id:
|
||||||
|
return web.json_response({"success": False, "error": "Missing prompt_id"}, status=400)
|
||||||
|
usage_stats = self._usage_stats_factory()
|
||||||
|
await usage_stats.process_execution(prompt_id)
|
||||||
|
return web.json_response({"success": True})
|
||||||
|
except Exception as exc: # pragma: no cover - defensive logging
|
||||||
|
logger.error("Failed to update usage stats: %s", exc, exc_info=True)
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||||
|
|
||||||
|
async def get_usage_stats(self, request: web.Request) -> web.Response:
|
||||||
|
try:
|
||||||
|
usage_stats = self._usage_stats_factory()
|
||||||
|
stats = await usage_stats.get_stats()
|
||||||
|
stats_response = {"success": True, "data": stats, "format_version": 2}
|
||||||
|
return web.json_response(stats_response)
|
||||||
|
except Exception as exc: # pragma: no cover - defensive logging
|
||||||
|
logger.error("Failed to get usage stats: %s", exc, exc_info=True)
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||||
|
|
||||||
|
|
||||||
|
class LoraCodeHandler:
|
||||||
|
def __init__(self, prompt_server: type[PromptServerProtocol]) -> None:
|
||||||
|
self._prompt_server = prompt_server
|
||||||
|
|
||||||
|
async def update_lora_code(self, request: web.Request) -> web.Response:
|
||||||
|
try:
|
||||||
|
data = await request.json()
|
||||||
|
node_ids = data.get("node_ids")
|
||||||
|
lora_code = data.get("lora_code", "")
|
||||||
|
mode = data.get("mode", "append")
|
||||||
|
|
||||||
|
if not lora_code:
|
||||||
|
return web.json_response({"success": False, "error": "Missing lora_code parameter"}, status=400)
|
||||||
|
|
||||||
|
results = []
|
||||||
|
if node_ids is None:
|
||||||
|
try:
|
||||||
|
self._prompt_server.instance.send_sync(
|
||||||
|
"lora_code_update", {"id": -1, "lora_code": lora_code, "mode": mode}
|
||||||
|
)
|
||||||
|
results.append({"node_id": "broadcast", "success": True})
|
||||||
|
except Exception as exc: # pragma: no cover - defensive logging
|
||||||
|
logger.error("Error broadcasting lora code: %s", exc)
|
||||||
|
results.append({"node_id": "broadcast", "success": False, "error": str(exc)})
|
||||||
|
else:
|
||||||
|
for node_id in node_ids:
|
||||||
|
try:
|
||||||
|
self._prompt_server.instance.send_sync(
|
||||||
|
"lora_code_update",
|
||||||
|
{"id": node_id, "lora_code": lora_code, "mode": mode},
|
||||||
|
)
|
||||||
|
results.append({"node_id": node_id, "success": True})
|
||||||
|
except Exception as exc: # pragma: no cover - defensive logging
|
||||||
|
logger.error("Error sending lora code to node %s: %s", node_id, exc)
|
||||||
|
results.append({"node_id": node_id, "success": False, "error": str(exc)})
|
||||||
|
|
||||||
|
return web.json_response({"success": True, "results": results})
|
||||||
|
except Exception as exc: # pragma: no cover - defensive logging
|
||||||
|
logger.error("Failed to update lora code: %s", exc, exc_info=True)
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||||
|
|
||||||
|
|
||||||
|
class TrainedWordsHandler:
|
||||||
|
async def get_trained_words(self, request: web.Request) -> web.Response:
|
||||||
|
try:
|
||||||
|
file_path = request.query.get("file_path")
|
||||||
|
if not file_path:
|
||||||
|
return web.json_response({"success": False, "error": "Missing file_path parameter"}, status=400)
|
||||||
|
if not os.path.exists(file_path):
|
||||||
|
return web.json_response({"success": False, "error": "File not found"}, status=404)
|
||||||
|
if not file_path.endswith(".safetensors"):
|
||||||
|
return web.json_response({"success": False, "error": "File must be a safetensors file"}, status=400)
|
||||||
|
|
||||||
|
trained_words, class_tokens = await extract_trained_words(file_path)
|
||||||
|
return web.json_response(
|
||||||
|
{
|
||||||
|
"success": True,
|
||||||
|
"trained_words": trained_words,
|
||||||
|
"class_tokens": class_tokens,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
except Exception as exc: # pragma: no cover - defensive logging
|
||||||
|
logger.error("Failed to get trained words: %s", exc, exc_info=True)
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||||
|
|
||||||
|
|
||||||
|
class ModelExampleFilesHandler:
|
||||||
|
async def get_model_example_files(self, request: web.Request) -> web.Response:
|
||||||
|
try:
|
||||||
|
model_path = request.query.get("model_path")
|
||||||
|
if not model_path:
|
||||||
|
return web.json_response({"success": False, "error": "Missing model_path parameter"}, status=400)
|
||||||
|
model_dir = os.path.dirname(model_path)
|
||||||
|
if not os.path.exists(model_dir):
|
||||||
|
return web.json_response({"success": False, "error": "Model directory not found"}, status=404)
|
||||||
|
|
||||||
|
base_name = os.path.splitext(os.path.basename(model_path))[0]
|
||||||
|
files = []
|
||||||
|
pattern = f"{base_name}.example."
|
||||||
|
for file in os.listdir(model_dir):
|
||||||
|
if not file.startswith(pattern):
|
||||||
|
continue
|
||||||
|
file_full_path = os.path.join(model_dir, file)
|
||||||
|
if not os.path.isfile(file_full_path):
|
||||||
|
continue
|
||||||
|
file_ext = os.path.splitext(file)[1].lower()
|
||||||
|
if file_ext not in SUPPORTED_MEDIA_EXTENSIONS["images"] and file_ext not in SUPPORTED_MEDIA_EXTENSIONS["videos"]:
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
index = int(file[len(pattern) :].split(".")[0])
|
||||||
|
except (ValueError, IndexError):
|
||||||
|
index = float("inf")
|
||||||
|
static_url = config.get_preview_static_url(file_full_path)
|
||||||
|
files.append(
|
||||||
|
{
|
||||||
|
"name": file,
|
||||||
|
"path": static_url,
|
||||||
|
"extension": file_ext,
|
||||||
|
"is_video": file_ext in SUPPORTED_MEDIA_EXTENSIONS["videos"],
|
||||||
|
"index": index,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
files.sort(key=lambda item: item["index"])
|
||||||
|
for file in files:
|
||||||
|
file.pop("index", None)
|
||||||
|
|
||||||
|
return web.json_response({"success": True, "files": files})
|
||||||
|
except Exception as exc: # pragma: no cover - defensive logging
|
||||||
|
logger.error("Failed to get model example files: %s", exc, exc_info=True)
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class ServiceRegistryAdapter:
|
||||||
|
get_lora_scanner: Callable[[], Awaitable]
|
||||||
|
get_checkpoint_scanner: Callable[[], Awaitable]
|
||||||
|
get_embedding_scanner: Callable[[], Awaitable]
|
||||||
|
|
||||||
|
|
||||||
|
class ModelLibraryHandler:
|
||||||
|
def __init__(self, service_registry: ServiceRegistryAdapter, metadata_provider_factory: Callable[[], Awaitable[MetadataProviderProtocol | None]]) -> None:
|
||||||
|
self._service_registry = service_registry
|
||||||
|
self._metadata_provider_factory = metadata_provider_factory
|
||||||
|
|
||||||
|
async def check_model_exists(self, request: web.Request) -> web.Response:
|
||||||
|
try:
|
||||||
|
model_id_str = request.query.get("modelId")
|
||||||
|
model_version_id_str = request.query.get("modelVersionId")
|
||||||
|
if not model_id_str:
|
||||||
|
return web.json_response({"success": False, "error": "Missing required parameter: modelId"}, status=400)
|
||||||
|
try:
|
||||||
|
model_id = int(model_id_str)
|
||||||
|
except ValueError:
|
||||||
|
return web.json_response({"success": False, "error": "Parameter modelId must be an integer"}, status=400)
|
||||||
|
|
||||||
|
lora_scanner = await self._service_registry.get_lora_scanner()
|
||||||
|
checkpoint_scanner = await self._service_registry.get_checkpoint_scanner()
|
||||||
|
embedding_scanner = await self._service_registry.get_embedding_scanner()
|
||||||
|
|
||||||
|
if model_version_id_str:
|
||||||
|
try:
|
||||||
|
model_version_id = int(model_version_id_str)
|
||||||
|
except ValueError:
|
||||||
|
return web.json_response({"success": False, "error": "Parameter modelVersionId must be an integer"}, status=400)
|
||||||
|
|
||||||
|
exists = False
|
||||||
|
model_type = None
|
||||||
|
if await lora_scanner.check_model_version_exists(model_version_id):
|
||||||
|
exists = True
|
||||||
|
model_type = "lora"
|
||||||
|
elif checkpoint_scanner and await checkpoint_scanner.check_model_version_exists(model_version_id):
|
||||||
|
exists = True
|
||||||
|
model_type = "checkpoint"
|
||||||
|
elif embedding_scanner and await embedding_scanner.check_model_version_exists(model_version_id):
|
||||||
|
exists = True
|
||||||
|
model_type = "embedding"
|
||||||
|
|
||||||
|
return web.json_response({"success": True, "exists": exists, "modelType": model_type if exists else None})
|
||||||
|
|
||||||
|
lora_versions = await lora_scanner.get_model_versions_by_id(model_id)
|
||||||
|
checkpoint_versions = []
|
||||||
|
embedding_versions = []
|
||||||
|
if not lora_versions and checkpoint_scanner:
|
||||||
|
checkpoint_versions = await checkpoint_scanner.get_model_versions_by_id(model_id)
|
||||||
|
if not lora_versions and not checkpoint_versions and embedding_scanner:
|
||||||
|
embedding_versions = await embedding_scanner.get_model_versions_by_id(model_id)
|
||||||
|
|
||||||
|
model_type = None
|
||||||
|
versions = []
|
||||||
|
if lora_versions:
|
||||||
|
model_type = "lora"
|
||||||
|
versions = lora_versions
|
||||||
|
elif checkpoint_versions:
|
||||||
|
model_type = "checkpoint"
|
||||||
|
versions = checkpoint_versions
|
||||||
|
elif embedding_versions:
|
||||||
|
model_type = "embedding"
|
||||||
|
versions = embedding_versions
|
||||||
|
|
||||||
|
return web.json_response({"success": True, "modelType": model_type, "versions": versions})
|
||||||
|
except Exception as exc: # pragma: no cover - defensive logging
|
||||||
|
logger.error("Failed to check model existence: %s", exc, exc_info=True)
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||||
|
|
||||||
|
async def get_model_versions_status(self, request: web.Request) -> web.Response:
|
||||||
|
try:
|
||||||
|
model_id_str = request.query.get("modelId")
|
||||||
|
if not model_id_str:
|
||||||
|
return web.json_response({"success": False, "error": "Missing required parameter: modelId"}, status=400)
|
||||||
|
try:
|
||||||
|
model_id = int(model_id_str)
|
||||||
|
except ValueError:
|
||||||
|
return web.json_response({"success": False, "error": "Parameter modelId must be an integer"}, status=400)
|
||||||
|
|
||||||
|
metadata_provider = await self._metadata_provider_factory()
|
||||||
|
if not metadata_provider:
|
||||||
|
return web.json_response({"success": False, "error": "Metadata provider not available"}, status=503)
|
||||||
|
|
||||||
|
response = await metadata_provider.get_model_versions(model_id)
|
||||||
|
if not response or not response.get("modelVersions"):
|
||||||
|
return web.json_response({"success": False, "error": "Model not found"}, status=404)
|
||||||
|
|
||||||
|
versions = response.get("modelVersions", [])
|
||||||
|
model_name = response.get("name", "")
|
||||||
|
model_type = response.get("type", "").lower()
|
||||||
|
|
||||||
|
scanner = None
|
||||||
|
normalized_type = None
|
||||||
|
if model_type in {"lora", "locon", "dora"}:
|
||||||
|
scanner = await self._service_registry.get_lora_scanner()
|
||||||
|
normalized_type = "lora"
|
||||||
|
elif model_type == "checkpoint":
|
||||||
|
scanner = await self._service_registry.get_checkpoint_scanner()
|
||||||
|
normalized_type = "checkpoint"
|
||||||
|
elif model_type == "textualinversion":
|
||||||
|
scanner = await self._service_registry.get_embedding_scanner()
|
||||||
|
normalized_type = "embedding"
|
||||||
|
else:
|
||||||
|
return web.json_response({"success": False, "error": f'Model type "{model_type}" is not supported'}, status=400)
|
||||||
|
|
||||||
|
if not scanner:
|
||||||
|
return web.json_response({"success": False, "error": f'Scanner for type "{normalized_type}" is not available'}, status=503)
|
||||||
|
|
||||||
|
local_versions = await scanner.get_model_versions_by_id(model_id)
|
||||||
|
local_version_ids = {version["versionId"] for version in local_versions}
|
||||||
|
|
||||||
|
enriched_versions = []
|
||||||
|
for version in versions:
|
||||||
|
version_id = version.get("id")
|
||||||
|
enriched_versions.append(
|
||||||
|
{
|
||||||
|
"id": version_id,
|
||||||
|
"name": version.get("name", ""),
|
||||||
|
"thumbnailUrl": version.get("images")[0]["url"] if version.get("images") else None,
|
||||||
|
"inLibrary": version_id in local_version_ids,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
return web.json_response(
|
||||||
|
{
|
||||||
|
"success": True,
|
||||||
|
"modelId": model_id,
|
||||||
|
"modelName": model_name,
|
||||||
|
"modelType": model_type,
|
||||||
|
"versions": enriched_versions,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
except Exception as exc: # pragma: no cover - defensive logging
|
||||||
|
logger.error("Failed to get model versions status: %s", exc, exc_info=True)
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||||
|
|
||||||
|
|
||||||
|
class MetadataArchiveHandler:
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
metadata_archive_manager_factory: Callable[[], Awaitable[MetadataArchiveManagerProtocol]] = get_metadata_archive_manager,
|
||||||
|
settings_service=default_settings,
|
||||||
|
metadata_provider_updater: Callable[[], Awaitable[None]] = update_metadata_providers,
|
||||||
|
) -> None:
|
||||||
|
self._metadata_archive_manager_factory = metadata_archive_manager_factory
|
||||||
|
self._settings = settings_service
|
||||||
|
self._metadata_provider_updater = metadata_provider_updater
|
||||||
|
|
||||||
|
async def download_metadata_archive(self, request: web.Request) -> web.Response:
|
||||||
|
try:
|
||||||
|
archive_manager = await self._metadata_archive_manager_factory()
|
||||||
|
download_id = request.query.get("download_id")
|
||||||
|
|
||||||
|
def progress_callback(stage: str, message: str) -> None:
|
||||||
|
data = {"stage": stage, "message": message, "type": "metadata_archive_download"}
|
||||||
|
if download_id:
|
||||||
|
asyncio.create_task(ws_manager.broadcast_download_progress(download_id, data))
|
||||||
|
else:
|
||||||
|
asyncio.create_task(ws_manager.broadcast(data))
|
||||||
|
|
||||||
|
success = await archive_manager.download_and_extract_database(progress_callback)
|
||||||
|
if success:
|
||||||
|
self._settings.set("enable_metadata_archive_db", True)
|
||||||
|
await self._metadata_provider_updater()
|
||||||
|
return web.json_response({"success": True, "message": "Metadata archive database downloaded and extracted successfully"})
|
||||||
|
return web.json_response({"success": False, "error": "Failed to download and extract metadata archive database"}, status=500)
|
||||||
|
except Exception as exc: # pragma: no cover - defensive logging
|
||||||
|
logger.error("Error downloading metadata archive: %s", exc, exc_info=True)
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||||
|
|
||||||
|
async def remove_metadata_archive(self, request: web.Request) -> web.Response:
|
||||||
|
try:
|
||||||
|
archive_manager = await self._metadata_archive_manager_factory()
|
||||||
|
success = await archive_manager.remove_database()
|
||||||
|
if success:
|
||||||
|
self._settings.set("enable_metadata_archive_db", False)
|
||||||
|
await self._metadata_provider_updater()
|
||||||
|
return web.json_response({"success": True, "message": "Metadata archive database removed successfully"})
|
||||||
|
return web.json_response({"success": False, "error": "Failed to remove metadata archive database"}, status=500)
|
||||||
|
except Exception as exc: # pragma: no cover - defensive logging
|
||||||
|
logger.error("Error removing metadata archive: %s", exc, exc_info=True)
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||||
|
|
||||||
|
async def get_metadata_archive_status(self, request: web.Request) -> web.Response:
|
||||||
|
try:
|
||||||
|
archive_manager = await self._metadata_archive_manager_factory()
|
||||||
|
is_available = archive_manager.is_database_available()
|
||||||
|
is_enabled = self._settings.get("enable_metadata_archive_db", False)
|
||||||
|
db_size = 0
|
||||||
|
if is_available:
|
||||||
|
db_path = archive_manager.get_database_path()
|
||||||
|
if db_path and os.path.exists(db_path):
|
||||||
|
db_size = os.path.getsize(db_path)
|
||||||
|
return web.json_response(
|
||||||
|
{
|
||||||
|
"success": True,
|
||||||
|
"isAvailable": is_available,
|
||||||
|
"isEnabled": is_enabled,
|
||||||
|
"databaseSize": db_size,
|
||||||
|
"databasePath": archive_manager.get_database_path() if is_available else None,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
except Exception as exc: # pragma: no cover - defensive logging
|
||||||
|
logger.error("Error getting metadata archive status: %s", exc, exc_info=True)
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||||
|
|
||||||
|
|
||||||
|
class FileSystemHandler:
|
||||||
|
async def open_file_location(self, request: web.Request) -> web.Response:
|
||||||
|
try:
|
||||||
|
data = await request.json()
|
||||||
|
file_path = data.get("file_path")
|
||||||
|
if not file_path:
|
||||||
|
return web.json_response({"success": False, "error": "Missing file_path parameter"}, status=400)
|
||||||
|
file_path = os.path.abspath(file_path)
|
||||||
|
if not os.path.isfile(file_path):
|
||||||
|
return web.json_response({"success": False, "error": "File does not exist"}, status=404)
|
||||||
|
|
||||||
|
if os.name == "nt":
|
||||||
|
subprocess.Popen(["explorer", "/select,", file_path])
|
||||||
|
elif os.name == "posix":
|
||||||
|
if sys.platform == "darwin":
|
||||||
|
subprocess.Popen(["open", "-R", file_path])
|
||||||
|
else:
|
||||||
|
folder = os.path.dirname(file_path)
|
||||||
|
subprocess.Popen(["xdg-open", folder])
|
||||||
|
|
||||||
|
return web.json_response({"success": True, "message": f"Opened folder and selected file: {file_path}"})
|
||||||
|
except Exception as exc: # pragma: no cover - defensive logging
|
||||||
|
logger.error("Failed to open file location: %s", exc, exc_info=True)
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||||
|
|
||||||
|
|
||||||
|
class NodeRegistryHandler:
    def __init__(
        self,
        node_registry: NodeRegistry,
        prompt_server: type[PromptServerProtocol],
        *,
        standalone_mode: bool,
    ) -> None:
        self._node_registry = node_registry
        self._prompt_server = prompt_server
        self._standalone_mode = standalone_mode

    async def register_nodes(self, request: web.Request) -> web.Response:
        try:
            data = await request.json()
            nodes = data.get("nodes", [])
            if not isinstance(nodes, list):
                return web.json_response({"success": False, "error": "nodes must be a list"}, status=400)
            for index, node in enumerate(nodes):
                if not isinstance(node, dict):
                    return web.json_response({"success": False, "error": f"Node {index} must be an object"}, status=400)
                node_id = node.get("node_id")
                if node_id is None:
                    return web.json_response({"success": False, "error": f"Node {index} missing node_id parameter"}, status=400)
                try:
                    node["node_id"] = int(node_id)
                except (TypeError, ValueError):
                    return web.json_response({"success": False, "error": f"Node {index} node_id must be an integer"}, status=400)

            await self._node_registry.register_nodes(nodes)
            return web.json_response({"success": True, "message": f"{len(nodes)} nodes registered successfully"})
        except Exception as exc:  # pragma: no cover - defensive logging
            logger.error("Failed to register nodes: %s", exc, exc_info=True)
            return web.json_response({"success": False, "error": str(exc)}, status=500)

    async def get_registry(self, request: web.Request) -> web.Response:
        try:
            if self._standalone_mode:
                logger.warning("Registry refresh not available in standalone mode")
                return web.json_response(
                    {
                        "success": False,
                        "error": "Standalone Mode Active",
                        "message": "Cannot interact with ComfyUI in standalone mode.",
                    },
                    status=503,
                )

            try:
                self._prompt_server.instance.send_sync("lora_registry_refresh", {})
                logger.debug("Sent registry refresh request to frontend")
            except Exception as exc:
                logger.error("Failed to send registry refresh message: %s", exc)
                return web.json_response(
                    {
                        "success": False,
                        "error": "Communication Error",
                        "message": f"Failed to communicate with ComfyUI frontend: {exc}",
                    },
                    status=500,
                )

            registry_updated = await self._node_registry.wait_for_update(timeout=1.0)
            if not registry_updated:
                logger.warning("Registry refresh timeout after 1 second")
                return web.json_response(
                    {
                        "success": False,
                        "error": "Timeout Error",
                        "message": "Registry refresh timeout - ComfyUI frontend may not be responsive",
                    },
                    status=408,
                )

            registry_info = await self._node_registry.get_registry()
            return web.json_response({"success": True, "data": registry_info})
        except Exception as exc:  # pragma: no cover - defensive logging
            logger.error("Failed to get registry: %s", exc, exc_info=True)
            return web.json_response({"success": False, "error": "Internal Error", "message": str(exc)}, status=500)

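A sketch of a payload register_nodes accepts: each entry must be an object whose node_id coerces to int; any other field is passed through to NodeRegistry.register_nodes unchanged. The extra fields shown here (graph_id, title) are illustrative assumptions, not a documented schema:

payload = {
    "nodes": [
        {"node_id": 12, "graph_id": "root", "title": "Lora Loader"},  # extra fields are assumptions
        {"node_id": "37"},  # string ids are accepted and coerced via int()
    ]
}
# POST payload as JSON to /api/lm/register-nodes (path from the misc route definitions below).
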
class MiscHandlerSet:
    """Aggregate handlers into a lookup compatible with the registrar."""

    def __init__(
        self,
        *,
        health: HealthCheckHandler,
        settings: SettingsHandler,
        usage_stats: UsageStatsHandler,
        lora_code: LoraCodeHandler,
        trained_words: TrainedWordsHandler,
        model_examples: ModelExampleFilesHandler,
        node_registry: NodeRegistryHandler,
        model_library: ModelLibraryHandler,
        metadata_archive: MetadataArchiveHandler,
        filesystem: FileSystemHandler,
    ) -> None:
        self.health = health
        self.settings = settings
        self.usage_stats = usage_stats
        self.lora_code = lora_code
        self.trained_words = trained_words
        self.model_examples = model_examples
        self.node_registry = node_registry
        self.model_library = model_library
        self.metadata_archive = metadata_archive
        self.filesystem = filesystem

    def to_route_mapping(self) -> Mapping[str, Callable[[web.Request], Awaitable[web.StreamResponse]]]:
        return {
            "health_check": self.health.health_check,
            "get_settings": self.settings.get_settings,
            "update_settings": self.settings.update_settings,
            "get_settings_libraries": self.settings.get_libraries,
            "activate_library": self.settings.activate_library,
            "update_usage_stats": self.usage_stats.update_usage_stats,
            "get_usage_stats": self.usage_stats.get_usage_stats,
            "update_lora_code": self.lora_code.update_lora_code,
            "get_trained_words": self.trained_words.get_trained_words,
            "get_model_example_files": self.model_examples.get_model_example_files,
            "register_nodes": self.node_registry.register_nodes,
            "get_registry": self.node_registry.get_registry,
            "check_model_exists": self.model_library.check_model_exists,
            "download_metadata_archive": self.metadata_archive.download_metadata_archive,
            "remove_metadata_archive": self.metadata_archive.remove_metadata_archive,
            "get_metadata_archive_status": self.metadata_archive.get_metadata_archive_status,
            "get_model_versions_status": self.model_library.get_model_versions_status,
            "open_file_location": self.filesystem.open_file_location,
        }

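The keys of this mapping are the handler_name values consumed by MiscRouteRegistrar (added later in this diff), so wiring is a two-liner once the ten handler objects exist; their construction is elided here:

handlers = MiscHandlerSet(...)  # construct with the handler objects above
MiscRouteRegistrar(app).register_routes(handlers.to_route_mapping())
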
def build_service_registry_adapter() -> ServiceRegistryAdapter:
    return ServiceRegistryAdapter(
        get_lora_scanner=ServiceRegistry.get_lora_scanner,
        get_checkpoint_scanner=ServiceRegistry.get_checkpoint_scanner,
        get_embedding_scanner=ServiceRegistry.get_embedding_scanner,
    )
1020  py/routes/handlers/model_handlers.py  Normal file
File diff suppressed because it is too large
56  py/routes/handlers/preview_handlers.py  Normal file
@@ -0,0 +1,56 @@
"""Handlers responsible for serving preview assets dynamically."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import urllib.parse
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from aiohttp import web
|
||||||
|
|
||||||
|
from ...config import config as global_config
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class PreviewHandler:
|
||||||
|
"""Serve preview assets for the active library at request time."""
|
||||||
|
|
||||||
|
def __init__(self, *, config=global_config) -> None:
|
||||||
|
self._config = config
|
||||||
|
|
||||||
|
async def serve_preview(self, request: web.Request) -> web.StreamResponse:
|
||||||
|
"""Return the preview file referenced by the encoded ``path`` query."""
|
||||||
|
|
||||||
|
raw_path = request.query.get("path", "")
|
||||||
|
if not raw_path:
|
||||||
|
raise web.HTTPBadRequest(text="Missing 'path' query parameter")
|
||||||
|
|
||||||
|
try:
|
||||||
|
decoded_path = urllib.parse.unquote(raw_path)
|
||||||
|
except Exception as exc: # pragma: no cover - defensive guard
|
||||||
|
logger.debug("Failed to decode preview path %s: %s", raw_path, exc)
|
||||||
|
raise web.HTTPBadRequest(text="Invalid preview path encoding") from exc
|
||||||
|
|
||||||
|
normalized = decoded_path.replace("\\", "/")
|
||||||
|
candidate = Path(normalized)
|
||||||
|
try:
|
||||||
|
resolved = candidate.expanduser().resolve(strict=False)
|
||||||
|
except Exception as exc:
|
||||||
|
logger.debug("Failed to resolve preview path %s: %s", normalized, exc)
|
||||||
|
raise web.HTTPBadRequest(text="Unable to resolve preview path") from exc
|
||||||
|
|
||||||
|
resolved_str = str(resolved)
|
||||||
|
if not self._config.is_preview_path_allowed(resolved_str):
|
||||||
|
logger.debug("Rejected preview outside allowed roots: %s", resolved_str)
|
||||||
|
raise web.HTTPForbidden(text="Preview path is not within an allowed directory")
|
||||||
|
|
||||||
|
if not resolved.is_file():
|
||||||
|
logger.debug("Preview file not found at %s", resolved_str)
|
||||||
|
raise web.HTTPNotFound(text="Preview file not found")
|
||||||
|
|
||||||
|
# aiohttp's FileResponse handles range requests and content headers for us.
|
||||||
|
return web.FileResponse(path=resolved, chunk_size=256 * 1024)
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ["PreviewHandler"]
|
||||||
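The handler decodes, normalizes, and resolves the path, then gates it through config.is_preview_path_allowed before streaming. The route this handler is mounted on is not part of this diff, so the URL below is an assumption; only the percent-encoding requirement follows from the code:

import urllib.parse

preview = "C:\\models\\loras\\example.preview.png"  # hypothetical file
query = urllib.parse.quote(preview, safe="")
url = f"/api/lm/previews?path={query}"  # route path is an assumption
# GET url -> 400 on bad encoding, 403 outside allowed roots, 404 if missing,
# otherwise a FileResponse with range-request support.
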
723  py/routes/handlers/recipe_handlers.py  Normal file
@@ -0,0 +1,723 @@
"""Dedicated handler objects for recipe-related routes."""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Any, Awaitable, Callable, Dict, Mapping, Optional
|
||||||
|
|
||||||
|
from aiohttp import web
|
||||||
|
|
||||||
|
from ...config import config
|
||||||
|
from ...services.server_i18n import server_i18n as default_server_i18n
|
||||||
|
from ...services.settings_manager import SettingsManager
|
||||||
|
from ...services.recipes import (
|
||||||
|
RecipeAnalysisService,
|
||||||
|
RecipeDownloadError,
|
||||||
|
RecipeNotFoundError,
|
||||||
|
RecipePersistenceService,
|
||||||
|
RecipeSharingService,
|
||||||
|
RecipeValidationError,
|
||||||
|
)
|
||||||
|
|
||||||
|
Logger = logging.Logger
|
||||||
|
EnsureDependenciesCallable = Callable[[], Awaitable[None]]
|
||||||
|
RecipeScannerGetter = Callable[[], Any]
|
||||||
|
CivitaiClientGetter = Callable[[], Any]
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
|
||||||
|
class RecipeHandlerSet:
|
||||||
|
"""Group of handlers providing recipe route implementations."""
|
||||||
|
|
||||||
|
page_view: "RecipePageView"
|
||||||
|
listing: "RecipeListingHandler"
|
||||||
|
query: "RecipeQueryHandler"
|
||||||
|
management: "RecipeManagementHandler"
|
||||||
|
analysis: "RecipeAnalysisHandler"
|
||||||
|
sharing: "RecipeSharingHandler"
|
||||||
|
|
||||||
|
def to_route_mapping(self) -> Mapping[str, Callable[[web.Request], Awaitable[web.StreamResponse]]]:
|
||||||
|
"""Expose handler coroutines keyed by registrar handler names."""
|
||||||
|
|
||||||
|
return {
|
||||||
|
"render_page": self.page_view.render_page,
|
||||||
|
"list_recipes": self.listing.list_recipes,
|
||||||
|
"get_recipe": self.listing.get_recipe,
|
||||||
|
"analyze_uploaded_image": self.analysis.analyze_uploaded_image,
|
||||||
|
"analyze_local_image": self.analysis.analyze_local_image,
|
||||||
|
"save_recipe": self.management.save_recipe,
|
||||||
|
"delete_recipe": self.management.delete_recipe,
|
||||||
|
"get_top_tags": self.query.get_top_tags,
|
||||||
|
"get_base_models": self.query.get_base_models,
|
||||||
|
"share_recipe": self.sharing.share_recipe,
|
||||||
|
"download_shared_recipe": self.sharing.download_shared_recipe,
|
||||||
|
"get_recipe_syntax": self.query.get_recipe_syntax,
|
||||||
|
"update_recipe": self.management.update_recipe,
|
||||||
|
"reconnect_lora": self.management.reconnect_lora,
|
||||||
|
"find_duplicates": self.query.find_duplicates,
|
||||||
|
"bulk_delete": self.management.bulk_delete,
|
||||||
|
"save_recipe_from_widget": self.management.save_recipe_from_widget,
|
||||||
|
"get_recipes_for_lora": self.query.get_recipes_for_lora,
|
||||||
|
"scan_recipes": self.query.scan_recipes,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
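The frozen dataclass keeps the handler group immutable once assembled; route registration only needs the name-to-coroutine mapping. A sketch of the wiring, with handler construction elided:

handler_set = RecipeHandlerSet(
    page_view=page_view,    # RecipePageView instance
    listing=listing,        # RecipeListingHandler instance
    query=query,            # RecipeQueryHandler instance
    management=management,  # RecipeManagementHandler instance
    analysis=analysis,      # RecipeAnalysisHandler instance
    sharing=sharing,        # RecipeSharingHandler instance
)
routes = handler_set.to_route_mapping()  # e.g. routes["list_recipes"] is a coroutine function
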
class RecipePageView:
    """Render the recipe shell page."""

    def __init__(
        self,
        *,
        ensure_dependencies_ready: EnsureDependenciesCallable,
        settings_service: SettingsManager,
        server_i18n=default_server_i18n,
        template_env,
        template_name: str,
        recipe_scanner_getter: RecipeScannerGetter,
        logger: Logger,
    ) -> None:
        self._ensure_dependencies_ready = ensure_dependencies_ready
        self._settings = settings_service
        self._server_i18n = server_i18n
        self._template_env = template_env
        self._template_name = template_name
        self._recipe_scanner_getter = recipe_scanner_getter
        self._logger = logger

    async def render_page(self, request: web.Request) -> web.Response:
        try:
            await self._ensure_dependencies_ready()
            recipe_scanner = self._recipe_scanner_getter()
            if recipe_scanner is None:  # pragma: no cover - defensive guard
                raise RuntimeError("Recipe scanner not available")

            user_language = self._settings.get("language", "en")
            self._server_i18n.set_locale(user_language)

            try:
                await recipe_scanner.get_cached_data(force_refresh=False)
                rendered = self._template_env.get_template(self._template_name).render(
                    recipes=[],
                    is_initializing=False,
                    settings=self._settings,
                    request=request,
                    t=self._server_i18n.get_translation,
                )
            except Exception as cache_error:  # pragma: no cover - logging path
                self._logger.error("Error loading recipe cache data: %s", cache_error)
                rendered = self._template_env.get_template(self._template_name).render(
                    is_initializing=True,
                    settings=self._settings,
                    request=request,
                    t=self._server_i18n.get_translation,
                )
            return web.Response(text=rendered, content_type="text/html")
        except Exception as exc:  # pragma: no cover - logging path
            self._logger.error("Error handling recipes request: %s", exc, exc_info=True)
            return web.Response(text="Error loading recipes page", status=500)

class RecipeListingHandler:
    """Provide listing and detail APIs for recipes."""

    def __init__(
        self,
        *,
        ensure_dependencies_ready: EnsureDependenciesCallable,
        recipe_scanner_getter: RecipeScannerGetter,
        logger: Logger,
    ) -> None:
        self._ensure_dependencies_ready = ensure_dependencies_ready
        self._recipe_scanner_getter = recipe_scanner_getter
        self._logger = logger

    async def list_recipes(self, request: web.Request) -> web.Response:
        try:
            await self._ensure_dependencies_ready()
            recipe_scanner = self._recipe_scanner_getter()
            if recipe_scanner is None:
                raise RuntimeError("Recipe scanner unavailable")

            page = int(request.query.get("page", "1"))
            page_size = int(request.query.get("page_size", "20"))
            sort_by = request.query.get("sort_by", "date")
            search = request.query.get("search")

            search_options = {
                "title": request.query.get("search_title", "true").lower() == "true",
                "tags": request.query.get("search_tags", "true").lower() == "true",
                "lora_name": request.query.get("search_lora_name", "true").lower() == "true",
                "lora_model": request.query.get("search_lora_model", "true").lower() == "true",
            }

            filters: Dict[str, list[str]] = {}
            base_models = request.query.get("base_models")
            if base_models:
                filters["base_model"] = base_models.split(",")

            tags = request.query.get("tags")
            if tags:
                filters["tags"] = tags.split(",")

            lora_hash = request.query.get("lora_hash")

            result = await recipe_scanner.get_paginated_data(
                page=page,
                page_size=page_size,
                sort_by=sort_by,
                search=search,
                filters=filters,
                search_options=search_options,
                lora_hash=lora_hash,
            )

            for item in result.get("items", []):
                file_path = item.get("file_path")
                if file_path:
                    item["file_url"] = self.format_recipe_file_url(file_path)
                else:
                    item.setdefault("file_url", "/loras_static/images/no-preview.png")
                item.setdefault("loras", [])
                item.setdefault("base_model", "")

            return web.json_response(result)
        except Exception as exc:
            self._logger.error("Error retrieving recipes: %s", exc, exc_info=True)
            return web.json_response({"error": str(exc)}, status=500)

    async def get_recipe(self, request: web.Request) -> web.Response:
        try:
            await self._ensure_dependencies_ready()
            recipe_scanner = self._recipe_scanner_getter()
            if recipe_scanner is None:
                raise RuntimeError("Recipe scanner unavailable")

            recipe_id = request.match_info["recipe_id"]
            recipe = await recipe_scanner.get_recipe_by_id(recipe_id)

            if not recipe:
                return web.json_response({"error": "Recipe not found"}, status=404)
            return web.json_response(recipe)
        except Exception as exc:
            self._logger.error("Error retrieving recipe details: %s", exc, exc_info=True)
            return web.json_response({"error": str(exc)}, status=500)

    def format_recipe_file_url(self, file_path: str) -> str:
        try:
            normalized_path = os.path.normpath(file_path)
            static_url = config.get_preview_static_url(normalized_path)
            if static_url:
                return static_url
        except Exception as exc:  # pragma: no cover - logging path
            self._logger.error("Error formatting recipe file URL: %s", exc, exc_info=True)
            return "/loras_static/images/no-preview.png"

        return "/loras_static/images/no-preview.png"

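list_recipes reads everything from the query string; the search_* flags arrive as the literal strings "true"/"false", and base_models/tags are comma-separated lists. A hedged example of building such a query (the mount path for this handler is not part of this diff, so it is an assumption):

from urllib.parse import urlencode

params = urlencode({
    "page": 2,
    "page_size": 50,
    "sort_by": "name",
    "search": "portrait",
    "search_lora_model": "false",  # disable one search field
    "base_models": "SDXL,Pony",    # comma-separated filter
})
url = f"/api/lm/recipes?{params}"  # path is an assumption
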
class RecipeQueryHandler:
    """Provide read-only insights on recipe data."""

    def __init__(
        self,
        *,
        ensure_dependencies_ready: EnsureDependenciesCallable,
        recipe_scanner_getter: RecipeScannerGetter,
        format_recipe_file_url: Callable[[str], str],
        logger: Logger,
    ) -> None:
        self._ensure_dependencies_ready = ensure_dependencies_ready
        self._recipe_scanner_getter = recipe_scanner_getter
        self._format_recipe_file_url = format_recipe_file_url
        self._logger = logger

    async def get_top_tags(self, request: web.Request) -> web.Response:
        try:
            await self._ensure_dependencies_ready()
            recipe_scanner = self._recipe_scanner_getter()
            if recipe_scanner is None:
                raise RuntimeError("Recipe scanner unavailable")

            limit = int(request.query.get("limit", "20"))
            cache = await recipe_scanner.get_cached_data()

            tag_counts: Dict[str, int] = {}
            for recipe in getattr(cache, "raw_data", []):
                for tag in recipe.get("tags", []) or []:
                    tag_counts[tag] = tag_counts.get(tag, 0) + 1

            sorted_tags = [{"tag": tag, "count": count} for tag, count in tag_counts.items()]
            sorted_tags.sort(key=lambda entry: entry["count"], reverse=True)
            return web.json_response({"success": True, "tags": sorted_tags[:limit]})
        except Exception as exc:
            self._logger.error("Error retrieving top tags: %s", exc, exc_info=True)
            return web.json_response({"success": False, "error": str(exc)}, status=500)

    async def get_base_models(self, request: web.Request) -> web.Response:
        try:
            await self._ensure_dependencies_ready()
            recipe_scanner = self._recipe_scanner_getter()
            if recipe_scanner is None:
                raise RuntimeError("Recipe scanner unavailable")

            cache = await recipe_scanner.get_cached_data()

            base_model_counts: Dict[str, int] = {}
            for recipe in getattr(cache, "raw_data", []):
                base_model = recipe.get("base_model")
                if base_model:
                    base_model_counts[base_model] = base_model_counts.get(base_model, 0) + 1

            sorted_models = [{"name": model, "count": count} for model, count in base_model_counts.items()]
            sorted_models.sort(key=lambda entry: entry["count"], reverse=True)
            return web.json_response({"success": True, "base_models": sorted_models})
        except Exception as exc:
            self._logger.error("Error retrieving base models: %s", exc, exc_info=True)
            return web.json_response({"success": False, "error": str(exc)}, status=500)

    async def get_recipes_for_lora(self, request: web.Request) -> web.Response:
        try:
            await self._ensure_dependencies_ready()
            recipe_scanner = self._recipe_scanner_getter()
            if recipe_scanner is None:
                raise RuntimeError("Recipe scanner unavailable")

            lora_hash = request.query.get("hash")
            if not lora_hash:
                return web.json_response({"success": False, "error": "Lora hash is required"}, status=400)

            matching_recipes = await recipe_scanner.get_recipes_for_lora(lora_hash)
            return web.json_response({"success": True, "recipes": matching_recipes})
        except Exception as exc:
            self._logger.error("Error getting recipes for Lora: %s", exc)
            return web.json_response({"success": False, "error": str(exc)}, status=500)

    async def scan_recipes(self, request: web.Request) -> web.Response:
        try:
            await self._ensure_dependencies_ready()
            recipe_scanner = self._recipe_scanner_getter()
            if recipe_scanner is None:
                raise RuntimeError("Recipe scanner unavailable")

            self._logger.info("Manually triggering recipe cache rebuild")
            await recipe_scanner.get_cached_data(force_refresh=True)
            return web.json_response({"success": True, "message": "Recipe cache refreshed successfully"})
        except Exception as exc:
            self._logger.error("Error refreshing recipe cache: %s", exc, exc_info=True)
            return web.json_response({"success": False, "error": str(exc)}, status=500)

    async def find_duplicates(self, request: web.Request) -> web.Response:
        try:
            await self._ensure_dependencies_ready()
            recipe_scanner = self._recipe_scanner_getter()
            if recipe_scanner is None:
                raise RuntimeError("Recipe scanner unavailable")

            duplicate_groups = await recipe_scanner.find_all_duplicate_recipes()
            response_data = []

            for fingerprint, recipe_ids in duplicate_groups.items():
                if len(recipe_ids) <= 1:
                    continue

                recipes = []
                for recipe_id in recipe_ids:
                    recipe = await recipe_scanner.get_recipe_by_id(recipe_id)
                    if recipe:
                        recipes.append(
                            {
                                "id": recipe.get("id"),
                                "title": recipe.get("title"),
                                "file_url": recipe.get("file_url")
                                or self._format_recipe_file_url(recipe.get("file_path", "")),
                                "modified": recipe.get("modified"),
                                "created_date": recipe.get("created_date"),
                                "lora_count": len(recipe.get("loras", [])),
                            }
                        )

                if len(recipes) >= 2:
                    recipes.sort(key=lambda entry: entry.get("modified", 0), reverse=True)
                    response_data.append(
                        {
                            "fingerprint": fingerprint,
                            "count": len(recipes),
                            "recipes": recipes,
                        }
                    )

            response_data.sort(key=lambda entry: entry["count"], reverse=True)
            return web.json_response({"success": True, "duplicate_groups": response_data})
        except Exception as exc:
            self._logger.error("Error finding duplicate recipes: %s", exc, exc_info=True)
            return web.json_response({"success": False, "error": str(exc)}, status=500)

    async def get_recipe_syntax(self, request: web.Request) -> web.Response:
        try:
            await self._ensure_dependencies_ready()
            recipe_scanner = self._recipe_scanner_getter()
            if recipe_scanner is None:
                raise RuntimeError("Recipe scanner unavailable")

            recipe_id = request.match_info["recipe_id"]
            try:
                syntax_parts = await recipe_scanner.get_recipe_syntax_tokens(recipe_id)
            except RecipeNotFoundError:
                return web.json_response({"error": "Recipe not found"}, status=404)

            if not syntax_parts:
                return web.json_response({"error": "No LoRAs found in this recipe"}, status=400)

            return web.json_response({"success": True, "syntax": " ".join(syntax_parts)})
        except Exception as exc:
            self._logger.error("Error generating recipe syntax: %s", exc, exc_info=True)
            return web.json_response({"error": str(exc)}, status=500)

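The tag and base-model tallies use the plain dict.get counting idiom; collections.Counter expresses the same aggregation more compactly, at the cost of one import. Shown only as a comparison, not as what the code does:

from collections import Counter

tag_counts = Counter(
    tag
    for recipe in getattr(cache, "raw_data", [])
    for tag in recipe.get("tags", []) or []
)
top = [{"tag": t, "count": c} for t, c in tag_counts.most_common(limit)]
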
class RecipeManagementHandler:
    """Handle create/update/delete style recipe operations."""

    def __init__(
        self,
        *,
        ensure_dependencies_ready: EnsureDependenciesCallable,
        recipe_scanner_getter: RecipeScannerGetter,
        logger: Logger,
        persistence_service: RecipePersistenceService,
        analysis_service: RecipeAnalysisService,
    ) -> None:
        self._ensure_dependencies_ready = ensure_dependencies_ready
        self._recipe_scanner_getter = recipe_scanner_getter
        self._logger = logger
        self._persistence_service = persistence_service
        self._analysis_service = analysis_service

    async def save_recipe(self, request: web.Request) -> web.Response:
        try:
            await self._ensure_dependencies_ready()
            recipe_scanner = self._recipe_scanner_getter()
            if recipe_scanner is None:
                raise RuntimeError("Recipe scanner unavailable")

            reader = await request.multipart()
            payload = await self._parse_save_payload(reader)

            result = await self._persistence_service.save_recipe(
                recipe_scanner=recipe_scanner,
                image_bytes=payload["image_bytes"],
                image_base64=payload["image_base64"],
                name=payload["name"],
                tags=payload["tags"],
                metadata=payload["metadata"],
            )
            return web.json_response(result.payload, status=result.status)
        except RecipeValidationError as exc:
            return web.json_response({"error": str(exc)}, status=400)
        except Exception as exc:
            self._logger.error("Error saving recipe: %s", exc, exc_info=True)
            return web.json_response({"error": str(exc)}, status=500)

    async def delete_recipe(self, request: web.Request) -> web.Response:
        try:
            await self._ensure_dependencies_ready()
            recipe_scanner = self._recipe_scanner_getter()
            if recipe_scanner is None:
                raise RuntimeError("Recipe scanner unavailable")

            recipe_id = request.match_info["recipe_id"]
            result = await self._persistence_service.delete_recipe(
                recipe_scanner=recipe_scanner, recipe_id=recipe_id
            )
            return web.json_response(result.payload, status=result.status)
        except RecipeNotFoundError as exc:
            return web.json_response({"error": str(exc)}, status=404)
        except Exception as exc:
            self._logger.error("Error deleting recipe: %s", exc, exc_info=True)
            return web.json_response({"error": str(exc)}, status=500)

    async def update_recipe(self, request: web.Request) -> web.Response:
        try:
            await self._ensure_dependencies_ready()
            recipe_scanner = self._recipe_scanner_getter()
            if recipe_scanner is None:
                raise RuntimeError("Recipe scanner unavailable")

            recipe_id = request.match_info["recipe_id"]
            data = await request.json()
            result = await self._persistence_service.update_recipe(
                recipe_scanner=recipe_scanner, recipe_id=recipe_id, updates=data
            )
            return web.json_response(result.payload, status=result.status)
        except RecipeValidationError as exc:
            return web.json_response({"error": str(exc)}, status=400)
        except RecipeNotFoundError as exc:
            return web.json_response({"error": str(exc)}, status=404)
        except Exception as exc:
            self._logger.error("Error updating recipe: %s", exc, exc_info=True)
            return web.json_response({"error": str(exc)}, status=500)

    async def reconnect_lora(self, request: web.Request) -> web.Response:
        try:
            await self._ensure_dependencies_ready()
            recipe_scanner = self._recipe_scanner_getter()
            if recipe_scanner is None:
                raise RuntimeError("Recipe scanner unavailable")

            data = await request.json()
            for field in ("recipe_id", "lora_index", "target_name"):
                if field not in data:
                    raise RecipeValidationError(f"Missing required field: {field}")

            result = await self._persistence_service.reconnect_lora(
                recipe_scanner=recipe_scanner,
                recipe_id=data["recipe_id"],
                lora_index=int(data["lora_index"]),
                target_name=data["target_name"],
            )
            return web.json_response(result.payload, status=result.status)
        except RecipeValidationError as exc:
            return web.json_response({"error": str(exc)}, status=400)
        except RecipeNotFoundError as exc:
            return web.json_response({"error": str(exc)}, status=404)
        except Exception as exc:
            self._logger.error("Error reconnecting LoRA: %s", exc, exc_info=True)
            return web.json_response({"error": str(exc)}, status=500)

    async def bulk_delete(self, request: web.Request) -> web.Response:
        try:
            await self._ensure_dependencies_ready()
            recipe_scanner = self._recipe_scanner_getter()
            if recipe_scanner is None:
                raise RuntimeError("Recipe scanner unavailable")

            data = await request.json()
            recipe_ids = data.get("recipe_ids", [])
            result = await self._persistence_service.bulk_delete(
                recipe_scanner=recipe_scanner, recipe_ids=recipe_ids
            )
            return web.json_response(result.payload, status=result.status)
        except RecipeValidationError as exc:
            return web.json_response({"success": False, "error": str(exc)}, status=400)
        except RecipeNotFoundError as exc:
            return web.json_response({"success": False, "error": str(exc)}, status=404)
        except Exception as exc:
            self._logger.error("Error performing bulk delete: %s", exc, exc_info=True)
            return web.json_response({"success": False, "error": str(exc)}, status=500)

    async def save_recipe_from_widget(self, request: web.Request) -> web.Response:
        try:
            await self._ensure_dependencies_ready()
            recipe_scanner = self._recipe_scanner_getter()
            if recipe_scanner is None:
                raise RuntimeError("Recipe scanner unavailable")

            analysis = await self._analysis_service.analyze_widget_metadata(
                recipe_scanner=recipe_scanner
            )
            metadata = analysis.payload.get("metadata")
            image_bytes = analysis.payload.get("image_bytes")
            if not metadata or image_bytes is None:
                raise RecipeValidationError("Unable to extract metadata from widget")

            result = await self._persistence_service.save_recipe_from_widget(
                recipe_scanner=recipe_scanner,
                metadata=metadata,
                image_bytes=image_bytes,
            )
            return web.json_response(result.payload, status=result.status)
        except RecipeValidationError as exc:
            return web.json_response({"error": str(exc)}, status=400)
        except Exception as exc:
            self._logger.error("Error saving recipe from widget: %s", exc, exc_info=True)
            return web.json_response({"error": str(exc)}, status=500)

    async def _parse_save_payload(self, reader) -> dict[str, Any]:
        image_bytes: Optional[bytes] = None
        image_base64: Optional[str] = None
        name: Optional[str] = None
        tags: list[str] = []
        metadata: Optional[Dict[str, Any]] = None

        while True:
            field = await reader.next()
            if field is None:
                break
            if field.name == "image":
                image_chunks = bytearray()
                while True:
                    chunk = await field.read_chunk()
                    if not chunk:
                        break
                    image_chunks.extend(chunk)
                image_bytes = bytes(image_chunks)
            elif field.name == "image_base64":
                image_base64 = await field.text()
            elif field.name == "name":
                name = await field.text()
            elif field.name == "tags":
                tags_text = await field.text()
                try:
                    parsed_tags = json.loads(tags_text)
                    tags = parsed_tags if isinstance(parsed_tags, list) else []
                except Exception:
                    tags = []
            elif field.name == "metadata":
                metadata_text = await field.text()
                try:
                    metadata = json.loads(metadata_text)
                except Exception:
                    metadata = {}

        return {
            "image_bytes": image_bytes,
            "image_base64": image_base64,
            "name": name,
            "tags": tags,
            "metadata": metadata,
        }

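_parse_save_payload walks the multipart fields by name, so a matching client request can be assembled with aiohttp's FormData. A sketch; the field names come from the parser above, while the filename and mount path are assumptions:

import json
import aiohttp

form = aiohttp.FormData()
form.add_field("image", open("recipe.png", "rb"), filename="recipe.png")  # hypothetical file
form.add_field("name", "My recipe")
form.add_field("tags", json.dumps(["portrait", "sdxl"]))  # must decode to a JSON list, else []
form.add_field("metadata", json.dumps({"loras": []}))     # must decode to JSON, else {}
# POST form to the save-recipe endpoint; image_base64 may be sent instead of image.
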
class RecipeAnalysisHandler:
    """Analyze images to extract recipe metadata."""

    def __init__(
        self,
        *,
        ensure_dependencies_ready: EnsureDependenciesCallable,
        recipe_scanner_getter: RecipeScannerGetter,
        civitai_client_getter: CivitaiClientGetter,
        logger: Logger,
        analysis_service: RecipeAnalysisService,
    ) -> None:
        self._ensure_dependencies_ready = ensure_dependencies_ready
        self._recipe_scanner_getter = recipe_scanner_getter
        self._civitai_client_getter = civitai_client_getter
        self._logger = logger
        self._analysis_service = analysis_service

    async def analyze_uploaded_image(self, request: web.Request) -> web.Response:
        try:
            await self._ensure_dependencies_ready()
            recipe_scanner = self._recipe_scanner_getter()
            civitai_client = self._civitai_client_getter()
            if recipe_scanner is None or civitai_client is None:
                raise RuntimeError("Required services unavailable")

            content_type = request.headers.get("Content-Type", "")
            if "multipart/form-data" in content_type:
                reader = await request.multipart()
                field = await reader.next()
                if field is None or field.name != "image":
                    raise RecipeValidationError("No image field found")
                image_chunks = bytearray()
                while True:
                    chunk = await field.read_chunk()
                    if not chunk:
                        break
                    image_chunks.extend(chunk)
                result = await self._analysis_service.analyze_uploaded_image(
                    image_bytes=bytes(image_chunks),
                    recipe_scanner=recipe_scanner,
                )
                return web.json_response(result.payload, status=result.status)

            if "application/json" in content_type:
                data = await request.json()
                result = await self._analysis_service.analyze_remote_image(
                    url=data.get("url"),
                    recipe_scanner=recipe_scanner,
                    civitai_client=civitai_client,
                )
                return web.json_response(result.payload, status=result.status)

            raise RecipeValidationError("Unsupported content type")
        except RecipeValidationError as exc:
            return web.json_response({"error": str(exc), "loras": []}, status=400)
        except RecipeDownloadError as exc:
            return web.json_response({"error": str(exc), "loras": []}, status=400)
        except RecipeNotFoundError as exc:
            return web.json_response({"error": str(exc), "loras": []}, status=404)
        except Exception as exc:
            self._logger.error("Error analyzing recipe image: %s", exc, exc_info=True)
            return web.json_response({"error": str(exc), "loras": []}, status=500)

    async def analyze_local_image(self, request: web.Request) -> web.Response:
        try:
            await self._ensure_dependencies_ready()
            recipe_scanner = self._recipe_scanner_getter()
            if recipe_scanner is None:
                raise RuntimeError("Recipe scanner unavailable")

            data = await request.json()
            result = await self._analysis_service.analyze_local_image(
                file_path=data.get("path"),
                recipe_scanner=recipe_scanner,
            )
            return web.json_response(result.payload, status=result.status)
        except RecipeValidationError as exc:
            return web.json_response({"error": str(exc), "loras": []}, status=400)
        except RecipeNotFoundError as exc:
            return web.json_response({"error": str(exc), "loras": []}, status=404)
        except Exception as exc:
            self._logger.error("Error analyzing local image: %s", exc, exc_info=True)
            return web.json_response({"error": str(exc), "loras": []}, status=500)

class RecipeSharingHandler:
    """Serve endpoints related to recipe sharing."""

    def __init__(
        self,
        *,
        ensure_dependencies_ready: EnsureDependenciesCallable,
        recipe_scanner_getter: RecipeScannerGetter,
        logger: Logger,
        sharing_service: RecipeSharingService,
    ) -> None:
        self._ensure_dependencies_ready = ensure_dependencies_ready
        self._recipe_scanner_getter = recipe_scanner_getter
        self._logger = logger
        self._sharing_service = sharing_service

    async def share_recipe(self, request: web.Request) -> web.Response:
        try:
            await self._ensure_dependencies_ready()
            recipe_scanner = self._recipe_scanner_getter()
            if recipe_scanner is None:
                raise RuntimeError("Recipe scanner unavailable")

            recipe_id = request.match_info["recipe_id"]
            result = await self._sharing_service.share_recipe(
                recipe_scanner=recipe_scanner, recipe_id=recipe_id
            )
            return web.json_response(result.payload, status=result.status)
        except RecipeNotFoundError as exc:
            return web.json_response({"error": str(exc)}, status=404)
        except Exception as exc:
            self._logger.error("Error sharing recipe: %s", exc, exc_info=True)
            return web.json_response({"error": str(exc)}, status=500)

    async def download_shared_recipe(self, request: web.Request) -> web.StreamResponse:
        try:
            await self._ensure_dependencies_ready()
            recipe_scanner = self._recipe_scanner_getter()
            if recipe_scanner is None:
                raise RuntimeError("Recipe scanner unavailable")

            recipe_id = request.match_info["recipe_id"]
            download_info = await self._sharing_service.prepare_download(
                recipe_scanner=recipe_scanner, recipe_id=recipe_id
            )
            return web.FileResponse(
                download_info.file_path,
                headers={
                    "Content-Disposition": f'attachment; filename="{download_info.download_filename}"'
                },
            )
        except RecipeNotFoundError as exc:
            return web.json_response({"error": str(exc)}, status=404)
        except Exception as exc:
            self._logger.error("Error downloading shared recipe: %s", exc, exc_info=True)
            return web.json_response({"error": str(exc)}, status=500)

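download_shared_recipe streams the shared file with an attachment Content-Disposition, so browsers save it under download_info.download_filename rather than rendering it. If filenames could ever contain quotes or non-ASCII characters, RFC 6266's filename* form would be the robust variant; a hedged sketch of that alternative, not what the code above does:

from urllib.parse import quote

filename = download_info.download_filename
headers = {
    "Content-Disposition": f"attachment; filename*=UTF-8''{quote(filename)}"
}
# web.FileResponse(download_info.file_path, headers=headers)
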
@@ -5,9 +5,9 @@ from typing import Dict
 from server import PromptServer # type: ignore
 
 from .base_model_routes import BaseModelRoutes
+from .model_route_registrar import ModelRouteRegistrar
 from ..services.lora_service import LoraService
 from ..services.service_registry import ServiceRegistry
-from ..services.metadata_service import get_default_metadata_provider
 from ..utils.utils import get_lora_info
 
 logger = logging.getLogger(__name__)
@@ -17,8 +17,7 @@ class LoraRoutes(BaseModelRoutes):
 
     def __init__(self):
         """Initialize LoRA routes with LoRA service"""
-        # Service will be initialized later via setup_routes
-        self.service = None
+        super().__init__()
         self.template_name = "loras.html"
 
     async def initialize_services(self):
@@ -26,31 +25,26 @@ class LoraRoutes(BaseModelRoutes):
         lora_scanner = await ServiceRegistry.get_lora_scanner()
         self.service = LoraService(lora_scanner)
 
-        # Initialize parent with the service
-        super().__init__(self.service)
+        # Attach service dependencies
+        self.attach_service(self.service)
 
     def setup_routes(self, app: web.Application):
         """Setup LoRA routes"""
         # Schedule service initialization on app startup
         app.on_startup.append(lambda _: self.initialize_services())
 
         # Setup common routes with 'loras' prefix (includes page route)
         super().setup_routes(app, 'loras')
 
-    def setup_specific_routes(self, app: web.Application, prefix: str):
+    def setup_specific_routes(self, registrar: ModelRouteRegistrar, prefix: str):
         """Setup LoRA-specific routes"""
         # LoRA-specific query routes
-        app.router.add_get(f'/api/{prefix}/letter-counts', self.get_letter_counts)
-        app.router.add_get(f'/api/{prefix}/get-trigger-words', self.get_lora_trigger_words)
-        app.router.add_get(f'/api/{prefix}/usage-tips-by-path', self.get_lora_usage_tips_by_path)
+        registrar.add_prefixed_route('GET', '/api/lm/{prefix}/letter-counts', prefix, self.get_letter_counts)
+        registrar.add_prefixed_route('GET', '/api/lm/{prefix}/get-trigger-words', prefix, self.get_lora_trigger_words)
+        registrar.add_prefixed_route('GET', '/api/lm/{prefix}/usage-tips-by-path', prefix, self.get_lora_usage_tips_by_path)
 
-        # CivitAI integration with LoRA-specific validation
-        app.router.add_get(f'/api/{prefix}/civitai/versions/{{model_id}}', self.get_civitai_versions_lora)
-        app.router.add_get(f'/api/{prefix}/civitai/model/version/{{modelVersionId}}', self.get_civitai_model_by_version)
-        app.router.add_get(f'/api/{prefix}/civitai/model/hash/{{hash}}', self.get_civitai_model_by_hash)
-
         # ComfyUI integration
-        app.router.add_post(f'/api/{prefix}/get_trigger_words', self.get_trigger_words)
+        registrar.add_prefixed_route('POST', '/api/lm/{prefix}/get_trigger_words', prefix, self.get_trigger_words)
 
     def _parse_specific_params(self, request: web.Request) -> Dict:
         """Parse LoRA-specific parameters"""
@@ -76,6 +70,15 @@ class LoraRoutes(BaseModelRoutes):
 
         return params
 
+    def _validate_civitai_model_type(self, model_type: str) -> bool:
+        """Validate CivitAI model type for LoRA"""
+        from ..utils.constants import VALID_LORA_TYPES
+        return model_type.lower() in VALID_LORA_TYPES
+
+    def _get_expected_model_types(self) -> str:
+        """Get expected model types string for error messages"""
+        return "LORA, LoCon, or DORA"
+
     # LoRA-specific route handlers
     async def get_letter_counts(self, request: web.Request) -> web.Response:
         """Get count of LoRAs for each letter of the alphabet"""
@@ -210,94 +213,6 @@ class LoraRoutes(BaseModelRoutes):
                 'error': str(e)
             }, status=500)
 
-    # CivitAI integration methods
-    async def get_civitai_versions_lora(self, request: web.Request) -> web.Response:
-        """Get available versions for a Civitai LoRA model with local availability info"""
-        try:
-            model_id = request.match_info['model_id']
-            metadata_provider = await get_default_metadata_provider()
-            response = await metadata_provider.get_model_versions(model_id)
-            if not response or not response.get('modelVersions'):
-                return web.Response(status=404, text="Model not found")
-
-            versions = response.get('modelVersions', [])
-            model_type = response.get('type', '')
-
-            # Check model type - should be LORA, LoCon, or DORA
-            from ..utils.constants import VALID_LORA_TYPES
-            if model_type.lower() not in VALID_LORA_TYPES:
-                return web.json_response({
-                    'error': f"Model type mismatch. Expected LORA or LoCon, got {model_type}"
-                }, status=400)
-
-            # Check local availability for each version
-            for version in versions:
-                # Find the model file (type="Model") in the files list
-                model_file = next((file for file in version.get('files', [])
-                                  if file.get('type') == 'Model'), None)
-
-                if model_file:
-                    sha256 = model_file.get('hashes', {}).get('SHA256')
-                    if sha256:
-                        # Set existsLocally and localPath at the version level
-                        version['existsLocally'] = self.service.has_hash(sha256)
-                        if version['existsLocally']:
-                            version['localPath'] = self.service.get_path_by_hash(sha256)
-
-                        # Also set the model file size at the version level for easier access
-                        version['modelSizeKB'] = model_file.get('sizeKB')
-                else:
-                    # No model file found in this version
-                    version['existsLocally'] = False
-
-            return web.json_response(versions)
-        except Exception as e:
-            logger.error(f"Error fetching LoRA model versions: {e}")
-            return web.Response(status=500, text=str(e))
-
-    async def get_civitai_model_by_version(self, request: web.Request) -> web.Response:
-        """Get CivitAI model details by model version ID"""
-        try:
-            model_version_id = request.match_info.get('modelVersionId')
-
-            # Get model details from metadata provider
-            metadata_provider = await get_default_metadata_provider()
-            model, error_msg = await metadata_provider.get_model_version_info(model_version_id)
-
-            if not model:
-                # Log warning for failed model retrieval
-                logger.warning(f"Failed to fetch model version {model_version_id}: {error_msg}")
-
-                # Determine status code based on error message
-                status_code = 404 if error_msg and "not found" in error_msg.lower() else 500
-
-                return web.json_response({
-                    "success": False,
-                    "error": error_msg or "Failed to fetch model information"
-                }, status=status_code)
-
-            return web.json_response(model)
-        except Exception as e:
-            logger.error(f"Error fetching model details: {e}")
-            return web.json_response({
-                "success": False,
-                "error": str(e)
-            }, status=500)
-
-    async def get_civitai_model_by_hash(self, request: web.Request) -> web.Response:
-        """Get CivitAI model details by hash"""
-        try:
-            hash = request.match_info.get('hash')
-            metadata_provider = await get_default_metadata_provider()
-            model = await metadata_provider.get_model_by_hash(hash)
-            return web.json_response(model)
-        except Exception as e:
-            logger.error(f"Error fetching model details by hash: {e}")
-            return web.json_response({
-                "success": False,
-                "error": str(e)
-            }, status=500)
-
     async def get_trigger_words(self, request: web.Request) -> web.Response:
         """Get trigger words for specified LoRA models"""
         try:
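The three CivitAI handlers deleted here are not lost: the common route table in py/routes/model_route_registrar.py (later in this diff) declares get_civitai_versions, get_civitai_model_by_version, and get_civitai_model_by_hash for every model prefix, and the new _validate_civitai_model_type/_get_expected_model_types hooks let the shared implementation keep the LoRA-specific type check. A hedged sketch of how the shared handler presumably consumes the hooks (the base-class code lives in the suppressed model_handlers.py diff, so this is an assumption):

# Assumed shape of the shared version handler; not confirmed by this diff.
model_type = response.get('type', '')
if not self._validate_civitai_model_type(model_type):
    return web.json_response({
        'error': f"Model type mismatch. Expected {self._get_expected_model_types()}, got {model_type}"
    }, status=400)
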
69  py/routes/misc_route_registrar.py  Normal file
@@ -0,0 +1,69 @@
"""Route registrar for miscellaneous endpoints.
|
||||||
|
|
||||||
|
This module mirrors the model route registrar architecture so that
|
||||||
|
miscellaneous endpoints share a consistent registration flow.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Callable, Iterable, Mapping
|
||||||
|
|
||||||
|
from aiohttp import web
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
|
||||||
|
class RouteDefinition:
|
||||||
|
"""Declarative definition for a HTTP route."""
|
||||||
|
|
||||||
|
method: str
|
||||||
|
path: str
|
||||||
|
handler_name: str
|
||||||
|
|
||||||
|
|
||||||
|
MISC_ROUTE_DEFINITIONS: tuple[RouteDefinition, ...] = (
|
||||||
|
RouteDefinition("GET", "/api/lm/settings", "get_settings"),
|
||||||
|
RouteDefinition("POST", "/api/lm/settings", "update_settings"),
|
||||||
|
RouteDefinition("GET", "/api/lm/settings/libraries", "get_settings_libraries"),
|
||||||
|
RouteDefinition("POST", "/api/lm/settings/libraries/activate", "activate_library"),
|
||||||
|
RouteDefinition("GET", "/api/lm/health-check", "health_check"),
|
||||||
|
RouteDefinition("POST", "/api/lm/open-file-location", "open_file_location"),
|
||||||
|
RouteDefinition("POST", "/api/lm/update-usage-stats", "update_usage_stats"),
|
||||||
|
RouteDefinition("GET", "/api/lm/get-usage-stats", "get_usage_stats"),
|
||||||
|
RouteDefinition("POST", "/api/lm/update-lora-code", "update_lora_code"),
|
||||||
|
RouteDefinition("GET", "/api/lm/trained-words", "get_trained_words"),
|
||||||
|
RouteDefinition("GET", "/api/lm/model-example-files", "get_model_example_files"),
|
||||||
|
RouteDefinition("POST", "/api/lm/register-nodes", "register_nodes"),
|
||||||
|
RouteDefinition("GET", "/api/lm/get-registry", "get_registry"),
|
||||||
|
RouteDefinition("GET", "/api/lm/check-model-exists", "check_model_exists"),
|
||||||
|
RouteDefinition("POST", "/api/lm/download-metadata-archive", "download_metadata_archive"),
|
||||||
|
RouteDefinition("POST", "/api/lm/remove-metadata-archive", "remove_metadata_archive"),
|
||||||
|
RouteDefinition("GET", "/api/lm/metadata-archive-status", "get_metadata_archive_status"),
|
||||||
|
RouteDefinition("GET", "/api/lm/model-versions-status", "get_model_versions_status"),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class MiscRouteRegistrar:
|
||||||
|
"""Bind miscellaneous route definitions to an aiohttp router."""
|
||||||
|
|
||||||
|
_METHOD_MAP = {
|
||||||
|
"GET": "add_get",
|
||||||
|
"POST": "add_post",
|
||||||
|
"PUT": "add_put",
|
||||||
|
"DELETE": "add_delete",
|
||||||
|
}
|
||||||
|
|
||||||
|
def __init__(self, app: web.Application) -> None:
|
||||||
|
self._app = app
|
||||||
|
|
||||||
|
def register_routes(
|
||||||
|
self,
|
||||||
|
handler_lookup: Mapping[str, Callable[[web.Request], object]],
|
||||||
|
*,
|
||||||
|
definitions: Iterable[RouteDefinition] = MISC_ROUTE_DEFINITIONS,
|
||||||
|
) -> None:
|
||||||
|
for definition in definitions:
|
||||||
|
self._bind(definition.method, definition.path, handler_lookup[definition.handler_name])
|
||||||
|
|
||||||
|
def _bind(self, method: str, path: str, handler: Callable) -> None:
|
||||||
|
add_method_name = self._METHOD_MAP[method.upper()]
|
||||||
|
add_method = getattr(self._app.router, add_method_name)
|
||||||
|
add_method(path, handler)
|
||||||
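Registration is then a single call per application; handler_lookup only needs the handler_name keys used by the definitions passed in. A minimal sketch using the MiscHandlerSet from earlier in this diff:

app = web.Application()
registrar = MiscRouteRegistrar(app)
registrar.register_routes(misc_handlers.to_route_mapping())  # misc_handlers: a MiscHandlerSet
# A KeyError here means a route definition names a handler the mapping does not provide.
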
File diff suppressed because it is too large
99  py/routes/model_route_registrar.py  Normal file
@@ -0,0 +1,99 @@
"""Route registrar for model endpoints."""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Callable, Iterable, Mapping
|
||||||
|
|
||||||
|
from aiohttp import web
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
|
||||||
|
class RouteDefinition:
|
||||||
|
"""Declarative definition for a HTTP route."""
|
||||||
|
|
||||||
|
method: str
|
||||||
|
path_template: str
|
||||||
|
handler_name: str
|
||||||
|
|
||||||
|
def build_path(self, prefix: str) -> str:
|
||||||
|
return self.path_template.replace("{prefix}", prefix)
|
||||||
|
|
||||||
|
|
||||||
|
COMMON_ROUTE_DEFINITIONS: tuple[RouteDefinition, ...] = (
|
||||||
|
RouteDefinition("GET", "/api/lm/{prefix}/list", "get_models"),
|
||||||
|
RouteDefinition("POST", "/api/lm/{prefix}/delete", "delete_model"),
|
||||||
|
RouteDefinition("POST", "/api/lm/{prefix}/exclude", "exclude_model"),
|
||||||
|
RouteDefinition("POST", "/api/lm/{prefix}/fetch-civitai", "fetch_civitai"),
|
||||||
|
RouteDefinition("POST", "/api/lm/{prefix}/fetch-all-civitai", "fetch_all_civitai"),
|
||||||
|
RouteDefinition("POST", "/api/lm/{prefix}/relink-civitai", "relink_civitai"),
|
||||||
|
RouteDefinition("POST", "/api/lm/{prefix}/replace-preview", "replace_preview"),
|
||||||
|
RouteDefinition("POST", "/api/lm/{prefix}/save-metadata", "save_metadata"),
|
||||||
|
RouteDefinition("POST", "/api/lm/{prefix}/add-tags", "add_tags"),
|
||||||
|
RouteDefinition("POST", "/api/lm/{prefix}/rename", "rename_model"),
|
||||||
|
RouteDefinition("POST", "/api/lm/{prefix}/bulk-delete", "bulk_delete_models"),
|
||||||
|
RouteDefinition("POST", "/api/lm/{prefix}/verify-duplicates", "verify_duplicates"),
|
||||||
|
RouteDefinition("POST", "/api/lm/{prefix}/move_model", "move_model"),
|
||||||
|
RouteDefinition("POST", "/api/lm/{prefix}/move_models_bulk", "move_models_bulk"),
|
||||||
|
RouteDefinition("GET", "/api/lm/{prefix}/auto-organize", "auto_organize_models"),
|
||||||
|
RouteDefinition("POST", "/api/lm/{prefix}/auto-organize", "auto_organize_models"),
|
||||||
|
RouteDefinition("GET", "/api/lm/{prefix}/auto-organize-progress", "get_auto_organize_progress"),
|
||||||
|
RouteDefinition("GET", "/api/lm/{prefix}/top-tags", "get_top_tags"),
|
||||||
|
RouteDefinition("GET", "/api/lm/{prefix}/base-models", "get_base_models"),
|
||||||
|
RouteDefinition("GET", "/api/lm/{prefix}/scan", "scan_models"),
|
||||||
|
RouteDefinition("GET", "/api/lm/{prefix}/roots", "get_model_roots"),
|
||||||
|
RouteDefinition("GET", "/api/lm/{prefix}/folders", "get_folders"),
|
||||||
|
RouteDefinition("GET", "/api/lm/{prefix}/folder-tree", "get_folder_tree"),
|
||||||
|
RouteDefinition("GET", "/api/lm/{prefix}/unified-folder-tree", "get_unified_folder_tree"),
|
||||||
|
RouteDefinition("GET", "/api/lm/{prefix}/find-duplicates", "find_duplicate_models"),
|
||||||
|
RouteDefinition("GET", "/api/lm/{prefix}/find-filename-conflicts", "find_filename_conflicts"),
|
||||||
|
RouteDefinition("GET", "/api/lm/{prefix}/get-notes", "get_model_notes"),
|
||||||
|
RouteDefinition("GET", "/api/lm/{prefix}/preview-url", "get_model_preview_url"),
|
||||||
|
RouteDefinition("GET", "/api/lm/{prefix}/civitai-url", "get_model_civitai_url"),
|
||||||
|
RouteDefinition("GET", "/api/lm/{prefix}/metadata", "get_model_metadata"),
|
||||||
|
RouteDefinition("GET", "/api/lm/{prefix}/model-description", "get_model_description"),
|
||||||
|
RouteDefinition("GET", "/api/lm/{prefix}/relative-paths", "get_relative_paths"),
|
||||||
|
RouteDefinition("GET", "/api/lm/{prefix}/civitai/versions/{model_id}", "get_civitai_versions"),
|
||||||
|
RouteDefinition("GET", "/api/lm/{prefix}/civitai/model/version/{modelVersionId}", "get_civitai_model_by_version"),
|
||||||
|
RouteDefinition("GET", "/api/lm/{prefix}/civitai/model/hash/{hash}", "get_civitai_model_by_hash"),
|
||||||
|
RouteDefinition("POST", "/api/lm/download-model", "download_model"),
|
||||||
|
RouteDefinition("GET", "/api/lm/download-model-get", "download_model_get"),
|
||||||
|
RouteDefinition("GET", "/api/lm/cancel-download-get", "cancel_download_get"),
|
||||||
|
RouteDefinition("GET", "/api/lm/download-progress/{download_id}", "get_download_progress"),
|
||||||
|
RouteDefinition("GET", "/{prefix}", "handle_models_page"),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ModelRouteRegistrar:
|
||||||
|
"""Bind declarative definitions to an aiohttp router."""
|
||||||
|
|
||||||
|
_METHOD_MAP = {
|
||||||
|
"GET": "add_get",
|
||||||
|
"POST": "add_post",
|
||||||
|
"PUT": "add_put",
|
||||||
|
"DELETE": "add_delete",
|
||||||
|
}
|
||||||
|
|
||||||
|
def __init__(self, app: web.Application) -> None:
|
||||||
|
self._app = app
|
||||||
|
|
||||||
|
def register_common_routes(
|
||||||
|
self,
|
||||||
|
prefix: str,
|
||||||
|
handler_lookup: Mapping[str, Callable[[web.Request], object]],
|
||||||
|
*,
|
||||||
|
definitions: Iterable[RouteDefinition] = COMMON_ROUTE_DEFINITIONS,
|
||||||
|
) -> None:
|
||||||
|
for definition in definitions:
|
||||||
|
self._bind_route(definition.method, definition.build_path(prefix), handler_lookup[definition.handler_name])
|
||||||
|
|
||||||
|
def add_route(self, method: str, path: str, handler: Callable) -> None:
|
||||||
|
self._bind_route(method, path, handler)
|
||||||
|
|
||||||
|
def add_prefixed_route(self, method: str, path_template: str, prefix: str, handler: Callable) -> None:
|
||||||
|
self._bind_route(method, path_template.replace("{prefix}", prefix), handler)
|
||||||
|
|
||||||
|
def _bind_route(self, method: str, path: str, handler: Callable) -> None:
|
||||||
|
add_method_name = self._METHOD_MAP[method.upper()]
|
||||||
|
add_method = getattr(self._app.router, add_method_name)
|
||||||
|
add_method(path, handler)
|
||||||
|
|
||||||
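A minimal usage sketch of the registrar. The handler and the wiring below are hypothetical; the real binding happens in the model routes controller, whose diff is suppressed above. Note that register_common_routes indexes handler_lookup for every definition it is given, so a partial lookup must be paired with a matching subset of definitions:

# Hypothetical wiring sketch: bind one common model route under the
# "loras" prefix. Handler names must match the RouteDefinition entries.
from aiohttp import web

from py.routes.model_route_registrar import (
    COMMON_ROUTE_DEFINITIONS,
    ModelRouteRegistrar,
)

async def get_models(request: web.Request) -> web.Response:
    return web.json_response({"items": []})  # placeholder handler

app = web.Application()
registrar = ModelRouteRegistrar(app)
registrar.register_common_routes(
    prefix="loras",
    handler_lookup={"get_models": get_models},
    definitions=[d for d in COMMON_ROUTE_DEFINITIONS if d.handler_name == "get_models"],
)
# GET /api/lm/loras/list is now bound to get_models.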
py/routes/preview_routes.py (new file, 25 lines)
@@ -0,0 +1,25 @@
"""Route controller for preview asset delivery."""

from __future__ import annotations

from aiohttp import web

from .handlers.preview_handlers import PreviewHandler


class PreviewRoutes:
    """Register routes that expose preview assets."""

    def __init__(self, *, handler: PreviewHandler | None = None) -> None:
        self._handler = handler or PreviewHandler()

    @classmethod
    def setup_routes(cls, app: web.Application) -> None:
        controller = cls()
        controller.register(app)

    def register(self, app: web.Application) -> None:
        app.router.add_get('/api/lm/previews', self._handler.serve_preview)


__all__ = ["PreviewRoutes"]
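The keyword-only handler argument exists so tests can supply a stand-in instead of the real PreviewHandler; a sketch under that assumption (the stub class is invented for illustration):

# Hypothetical test sketch: inject a stub handler, then register it
# against a fresh application instead of going through setup_routes.
from aiohttp import web

from py.routes.preview_routes import PreviewRoutes

class StubPreviewHandler:
    async def serve_preview(self, request: web.Request) -> web.Response:
        return web.Response(text="stub preview")

app = web.Application()
PreviewRoutes(handler=StubPreviewHandler()).register(app)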
py/routes/recipe_route_registrar.py (new file, 64 lines)
@@ -0,0 +1,64 @@
"""Route registrar for recipe endpoints."""

from __future__ import annotations

from dataclasses import dataclass
from typing import Callable, Mapping

from aiohttp import web


@dataclass(frozen=True)
class RouteDefinition:
    """Declarative definition for a recipe HTTP route."""

    method: str
    path: str
    handler_name: str


ROUTE_DEFINITIONS: tuple[RouteDefinition, ...] = (
    RouteDefinition("GET", "/loras/recipes", "render_page"),
    RouteDefinition("GET", "/api/lm/recipes", "list_recipes"),
    RouteDefinition("GET", "/api/lm/recipe/{recipe_id}", "get_recipe"),
    RouteDefinition("POST", "/api/lm/recipes/analyze-image", "analyze_uploaded_image"),
    RouteDefinition("POST", "/api/lm/recipes/analyze-local-image", "analyze_local_image"),
    RouteDefinition("POST", "/api/lm/recipes/save", "save_recipe"),
    RouteDefinition("DELETE", "/api/lm/recipe/{recipe_id}", "delete_recipe"),
    RouteDefinition("GET", "/api/lm/recipes/top-tags", "get_top_tags"),
    RouteDefinition("GET", "/api/lm/recipes/base-models", "get_base_models"),
    RouteDefinition("GET", "/api/lm/recipe/{recipe_id}/share", "share_recipe"),
    RouteDefinition("GET", "/api/lm/recipe/{recipe_id}/share/download", "download_shared_recipe"),
    RouteDefinition("GET", "/api/lm/recipe/{recipe_id}/syntax", "get_recipe_syntax"),
    RouteDefinition("PUT", "/api/lm/recipe/{recipe_id}/update", "update_recipe"),
    RouteDefinition("POST", "/api/lm/recipe/lora/reconnect", "reconnect_lora"),
    RouteDefinition("GET", "/api/lm/recipes/find-duplicates", "find_duplicates"),
    RouteDefinition("POST", "/api/lm/recipes/bulk-delete", "bulk_delete"),
    RouteDefinition("POST", "/api/lm/recipes/save-from-widget", "save_recipe_from_widget"),
    RouteDefinition("GET", "/api/lm/recipes/for-lora", "get_recipes_for_lora"),
    RouteDefinition("GET", "/api/lm/recipes/scan", "scan_recipes"),
)


class RecipeRouteRegistrar:
    """Bind declarative recipe definitions to an aiohttp router."""

    _METHOD_MAP = {
        "GET": "add_get",
        "POST": "add_post",
        "PUT": "add_put",
        "DELETE": "add_delete",
    }

    def __init__(self, app: web.Application) -> None:
        self._app = app

    def register_routes(self, handler_lookup: Mapping[str, Callable[[web.Request], object]]) -> None:
        for definition in ROUTE_DEFINITIONS:
            handler = handler_lookup[definition.handler_name]
            self._bind_route(definition.method, definition.path, handler)

    def _bind_route(self, method: str, path: str, handler: Callable) -> None:
        add_method_name = self._METHOD_MAP[method.upper()]
        add_method = getattr(self._app.router, add_method_name)
        add_method(path, handler)
File diff suppressed because it is too large.

@@ -507,12 +507,12 @@ class StatsRoutes:
         app.router.add_get('/statistics', self.handle_stats_page)

         # Register API routes
-        app.router.add_get('/api/stats/collection-overview', self.get_collection_overview)
-        app.router.add_get('/api/stats/usage-analytics', self.get_usage_analytics)
-        app.router.add_get('/api/stats/base-model-distribution', self.get_base_model_distribution)
-        app.router.add_get('/api/stats/tag-analytics', self.get_tag_analytics)
-        app.router.add_get('/api/stats/storage-analytics', self.get_storage_analytics)
-        app.router.add_get('/api/stats/insights', self.get_insights)
+        app.router.add_get('/api/lm/stats/collection-overview', self.get_collection_overview)
+        app.router.add_get('/api/lm/stats/usage-analytics', self.get_usage_analytics)
+        app.router.add_get('/api/lm/stats/base-model-distribution', self.get_base_model_distribution)
+        app.router.add_get('/api/lm/stats/tag-analytics', self.get_tag_analytics)
+        app.router.add_get('/api/lm/stats/storage-analytics', self.get_storage_analytics)
+        app.router.add_get('/api/lm/stats/insights', self.get_insights)

     async def _on_startup(self, app):
         """Initialize services when the app starts"""
@@ -5,21 +5,27 @@ import git
 import zipfile
 import shutil
 import tempfile
-from aiohttp import web
+import asyncio
+from aiohttp import web, ClientError
 from typing import Dict, List
-from ..services.downloader import get_downloader, Downloader
+from ..utils.settings_paths import ensure_settings_file
+from ..services.downloader import get_downloader

 logger = logging.getLogger(__name__)

+NETWORK_EXCEPTIONS = (ClientError, OSError, asyncio.TimeoutError)
+

 class UpdateRoutes:
     """Routes for handling plugin update checks"""

     @staticmethod
     def setup_routes(app):
         """Register update check routes"""
-        app.router.add_get('/api/check-updates', UpdateRoutes.check_updates)
-        app.router.add_get('/api/version-info', UpdateRoutes.get_version_info)
-        app.router.add_post('/api/perform-update', UpdateRoutes.perform_update)
+        app.router.add_get('/api/lm/check-updates', UpdateRoutes.check_updates)
+        app.router.add_get('/api/lm/version-info', UpdateRoutes.get_version_info)
+        app.router.add_post('/api/lm/perform-update', UpdateRoutes.perform_update)

     @staticmethod
     async def check_updates(request):
@@ -63,6 +69,12 @@ class UpdateRoutes:
                 'nightly': nightly
             })

+        except NETWORK_EXCEPTIONS as e:
+            logger.warning("Network unavailable during update check: %s", e)
+            return web.json_response({
+                'success': False,
+                'error': 'Network unavailable for update check'
+            })
         except Exception as e:
             logger.error(f"Failed to check for updates: {e}", exc_info=True)
             return web.json_response({
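Because Python evaluates except clauses top to bottom, the NETWORK_EXCEPTIONS handler only fires if it appears before the generic Exception handler, exactly as the hunk places it. A standalone illustration of that ordering constraint:

import asyncio

from aiohttp import ClientError

NETWORK_EXCEPTIONS = (ClientError, OSError, asyncio.TimeoutError)

def classify(exc: BaseException) -> str:
    # Narrow handlers must precede broad ones, or they become unreachable.
    try:
        raise exc
    except NETWORK_EXCEPTIONS:
        return "network"
    except Exception:
        return "unexpected"

assert classify(asyncio.TimeoutError()) == "network"
assert classify(ValueError("boom")) == "unexpected"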
@@ -111,7 +123,7 @@ class UpdateRoutes:
         current_dir = os.path.dirname(os.path.abspath(__file__))
         plugin_root = os.path.dirname(os.path.dirname(current_dir))

-        settings_path = os.path.join(plugin_root, 'settings.json')
+        settings_path = ensure_settings_file(logger)
         settings_backup = None
         if os.path.exists(settings_path):
             with open(settings_path, 'r', encoding='utf-8') as f:
@@ -265,7 +277,7 @@ class UpdateRoutes:
         github_url = f"https://api.github.com/repos/{repo_owner}/{repo_name}/commits/main"

         try:
-            downloader = await Downloader.get_instance()
+            downloader = await get_downloader()
             success, data = await downloader.make_request('GET', github_url, custom_headers={'Accept': 'application/vnd.github+json'})

             if not success:
@@ -283,6 +295,9 @@ class UpdateRoutes:

             return version, changelog

+        except NETWORK_EXCEPTIONS as e:
+            logger.warning("Unable to reach GitHub for nightly version: %s", e)
+            return "main", []
         except Exception as e:
             logger.error(f"Error fetching nightly version: {e}", exc_info=True)
             return "main", []
@@ -431,7 +446,7 @@ class UpdateRoutes:
         github_url = f"https://api.github.com/repos/{repo_owner}/{repo_name}/releases/latest"

         try:
-            downloader = await Downloader.get_instance()
+            downloader = await get_downloader()
             success, data = await downloader.make_request('GET', github_url, custom_headers={'Accept': 'application/vnd.github+json'})

             if not success:
@@ -448,6 +463,9 @@ class UpdateRoutes:

             return version, changelog

+        except NETWORK_EXCEPTIONS as e:
+            logger.warning("Unable to reach GitHub for release info: %s", e)
+            return "v0.0.0", []
         except Exception as e:
             logger.error(f"Error fetching remote version: {e}", exc_info=True)
             return "v0.0.0", []
@@ -4,99 +4,89 @@ import logging
 import os

 from ..utils.models import BaseModelMetadata
-from ..utils.routes_common import ModelRouteUtils
-from ..utils.constants import NSFW_LEVELS
-from .settings_manager import settings
-from ..utils.utils import fuzzy_match
+from ..utils.metadata_manager import MetadataManager
+from .model_query import FilterCriteria, ModelCacheRepository, ModelFilterSet, SearchStrategy, SettingsProvider
+from .settings_manager import settings as default_settings

 logger = logging.getLogger(__name__)

 class BaseModelService(ABC):
     """Base service class for all model types"""

-    def __init__(self, model_type: str, scanner, metadata_class: Type[BaseModelMetadata]):
-        """Initialize the service
+    def __init__(
+        self,
+        model_type: str,
+        scanner,
+        metadata_class: Type[BaseModelMetadata],
+        *,
+        cache_repository: Optional[ModelCacheRepository] = None,
+        filter_set: Optional[ModelFilterSet] = None,
+        search_strategy: Optional[SearchStrategy] = None,
+        settings_provider: Optional[SettingsProvider] = None,
+    ):
+        """Initialize the service.

         Args:
-            model_type: Type of model (lora, checkpoint, etc.)
-            scanner: Model scanner instance
-            metadata_class: Metadata class for this model type
+            model_type: Type of model (lora, checkpoint, etc.).
+            scanner: Model scanner instance.
+            metadata_class: Metadata class for this model type.
+            cache_repository: Custom repository for cache access (primarily for tests).
+            filter_set: Filter component controlling folder/tag/favorites logic.
+            search_strategy: Search component for fuzzy/text matching.
+            settings_provider: Settings object; defaults to the global settings manager.
         """
         self.model_type = model_type
         self.scanner = scanner
         self.metadata_class = metadata_class
+        self.settings = settings_provider or default_settings
+        self.cache_repository = cache_repository or ModelCacheRepository(scanner)
+        self.filter_set = filter_set or ModelFilterSet(self.settings)
+        self.search_strategy = search_strategy or SearchStrategy()

-    async def get_paginated_data(self, page: int, page_size: int, sort_by: str = 'name',
-                                 folder: str = None, search: str = None, fuzzy_search: bool = False,
-                                 base_models: list = None, tags: list = None,
-                                 search_options: dict = None, hash_filters: dict = None,
-                                 favorites_only: bool = False, **kwargs) -> Dict:
-        """Get paginated and filtered model data
-
-        Args:
-            page: Page number (1-based)
-            page_size: Number of items per page
-            sort_by: Sort criteria, e.g. 'name', 'name:asc', 'name:desc', 'date', 'date:asc', 'date:desc'
-            folder: Folder filter
-            search: Search term
-            fuzzy_search: Whether to use fuzzy search
-            base_models: List of base models to filter by
-            tags: List of tags to filter by
-            search_options: Search options dict
-            hash_filters: Hash filtering options
-            favorites_only: Filter for favorites only
-            **kwargs: Additional model-specific filters
-
-        Returns:
-            Dict containing paginated results
-        """
-        cache = await self.scanner.get_cached_data()
-
-        # Parse sort_by into sort_key and order
-        if ':' in sort_by:
-            sort_key, order = sort_by.split(':', 1)
-            sort_key = sort_key.strip()
-            order = order.strip().lower()
-            if order not in ('asc', 'desc'):
-                order = 'asc'
-        else:
-            sort_key = sort_by.strip()
-            order = 'asc'
-
-        # Get default search options if not provided
-        if search_options is None:
-            search_options = {
-                'filename': True,
-                'modelname': True,
-                'tags': False,
-                'recursive': True,
-            }
-
-        # Get the base data set using new sort logic
-        filtered_data = await cache.get_sorted_data(sort_key, order)
-
-        # Apply hash filtering if provided (highest priority)
+    async def get_paginated_data(
+        self,
+        page: int,
+        page_size: int,
+        sort_by: str = 'name',
+        folder: str = None,
+        search: str = None,
+        fuzzy_search: bool = False,
+        base_models: list = None,
+        tags: list = None,
+        search_options: dict = None,
+        hash_filters: dict = None,
+        favorites_only: bool = False,
+        **kwargs,
+    ) -> Dict:
+        """Get paginated and filtered model data"""
+        sort_params = self.cache_repository.parse_sort(sort_by)
+        sorted_data = await self.cache_repository.fetch_sorted(sort_params)
+
         if hash_filters:
-            filtered_data = await self._apply_hash_filters(filtered_data, hash_filters)
-
-            # Jump to pagination for hash filters
+            filtered_data = await self._apply_hash_filters(sorted_data, hash_filters)
             return self._paginate(filtered_data, page, page_size)

-        # Apply common filters
         filtered_data = await self._apply_common_filters(
-            filtered_data, folder, base_models, tags, favorites_only, search_options
+            sorted_data,
+            folder=folder,
+            base_models=base_models,
+            tags=tags,
+            favorites_only=favorites_only,
+            search_options=search_options,
         )

-        # Apply search filtering
         if search:
             filtered_data = await self._apply_search_filters(
-                filtered_data, search, fuzzy_search, search_options
+                filtered_data,
+                search,
+                fuzzy_search,
+                search_options,
             )

-        # Apply model-specific filters
         filtered_data = await self._apply_specific_filters(filtered_data, **kwargs)

         return self._paginate(filtered_data, page, page_size)

     async def _apply_hash_filters(self, data: List[Dict], hash_filters: Dict) -> List[Dict]:
         """Apply hash-based filtering"""
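The keyword-only constructor parameters make every collaborator injectable, which is what the "primarily for tests" note in the docstring refers to. A hypothetical test sketch under that assumption; the stub classes mirror only the methods the service calls and stand in for the real model_query implementations, and the concrete subclass name is invented:

# Hypothetical sketch: construct a service with stub collaborators so
# get_paginated_data can be exercised without a real scanner or cache.
class StubCacheRepository:
    def parse_sort(self, sort_by):
        return sort_by  # opaque token handed back to fetch_sorted

    async def fetch_sorted(self, sort_params):
        return [{"file_name": "a.safetensors", "folder": ""}]

class StubFilterSet:
    def apply(self, data, criteria):
        return data  # pass-through for the test

class StubSearchStrategy:
    def normalize_options(self, options):
        return options or {}

    def apply(self, data, search, options, fuzzy):
        return data

service = SomeConcreteModelService(  # hypothetical BaseModelService subclass
    "lora", scanner=None, metadata_class=LoraMetadata,
    cache_repository=StubCacheRepository(),
    filter_set=StubFilterSet(),
    search_strategy=StubSearchStrategy(),
)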
@@ -120,113 +110,36 @@ class BaseModelService(ABC):

         return data

-    async def _apply_common_filters(self, data: List[Dict], folder: str = None,
-                                    base_models: list = None, tags: list = None,
-                                    favorites_only: bool = False, search_options: dict = None) -> List[Dict]:
+    async def _apply_common_filters(
+        self,
+        data: List[Dict],
+        folder: str = None,
+        base_models: list = None,
+        tags: list = None,
+        favorites_only: bool = False,
+        search_options: dict = None,
+    ) -> List[Dict]:
         """Apply common filters that work across all model types"""
-        # Apply SFW filtering if enabled in settings
-        if settings.get('show_only_sfw', False):
-            data = [
-                item for item in data
-                if not item.get('preview_nsfw_level') or item.get('preview_nsfw_level') < NSFW_LEVELS['R']
-            ]
-
-        # Apply favorites filtering if enabled
-        if favorites_only:
-            data = [
-                item for item in data
-                if item.get('favorite', False) is True
-            ]
-
-        # Apply folder filtering
-        if folder is not None:
-            if search_options and search_options.get('recursive', True):
-                # Recursive folder filtering - include all subfolders
-                # Ensure we match exact folder or its subfolders by checking path boundaries
-                if folder == "":
-                    # Empty folder means root - include all items
-                    pass  # Don't filter anything
-                else:
-                    # Add trailing slash to ensure we match folder boundaries correctly
-                    folder_with_separator = folder + "/"
-                    data = [
-                        item for item in data
-                        if (item['folder'] == folder or
-                            item['folder'].startswith(folder_with_separator))
-                    ]
-            else:
-                # Exact folder filtering
-                data = [
-                    item for item in data
-                    if item['folder'] == folder
-                ]
-
-        # Apply base model filtering
-        if base_models and len(base_models) > 0:
-            data = [
-                item for item in data
-                if item.get('base_model') in base_models
-            ]
-
-        # Apply tag filtering
-        if tags and len(tags) > 0:
-            data = [
-                item for item in data
-                if any(tag in item.get('tags', []) for tag in tags)
-            ]
-
-        return data
+        normalized_options = self.search_strategy.normalize_options(search_options)
+        criteria = FilterCriteria(
+            folder=folder,
+            base_models=base_models,
+            tags=tags,
+            favorites_only=favorites_only,
+            search_options=normalized_options,
+        )
+        return self.filter_set.apply(data, criteria)

-    async def _apply_search_filters(self, data: List[Dict], search: str,
-                                    fuzzy_search: bool, search_options: dict) -> List[Dict]:
+    async def _apply_search_filters(
+        self,
+        data: List[Dict],
+        search: str,
+        fuzzy_search: bool,
+        search_options: dict,
+    ) -> List[Dict]:
         """Apply search filtering"""
-        search_results = []
-
-        for item in data:
-            # Search by file name
-            if search_options.get('filename', True):
-                if fuzzy_search:
-                    if fuzzy_match(item.get('file_name', ''), search):
-                        search_results.append(item)
-                        continue
-                elif search.lower() in item.get('file_name', '').lower():
-                    search_results.append(item)
-                    continue
-
-            # Search by model name
-            if search_options.get('modelname', True):
-                if fuzzy_search:
-                    if fuzzy_match(item.get('model_name', ''), search):
-                        search_results.append(item)
-                        continue
-                elif search.lower() in item.get('model_name', '').lower():
-                    search_results.append(item)
-                    continue
-
-            # Search by tags
-            if search_options.get('tags', False) and 'tags' in item:
-                if any((fuzzy_match(tag, search) if fuzzy_search else search.lower() in tag.lower())
-                       for tag in item['tags']):
-                    search_results.append(item)
-                    continue
-
-            # Search by creator
-            civitai = item.get('civitai')
-            creator_username = ''
-            if civitai and isinstance(civitai, dict):
-                creator = civitai.get('creator')
-                if creator and isinstance(creator, dict):
-                    creator_username = creator.get('username', '')
-            if search_options.get('creator', False) and creator_username:
-                if fuzzy_search:
-                    if fuzzy_match(creator_username, search):
-                        search_results.append(item)
-                        continue
-                elif search.lower() in creator_username.lower():
-                    search_results.append(item)
-                    continue
-
-        return search_results
+        normalized_options = self.search_strategy.normalize_options(search_options)
+        return self.search_strategy.apply(data, search, normalized_options, fuzzy_search)

     async def _apply_specific_filters(self, data: List[Dict], **kwargs) -> List[Dict]:
         """Apply model-specific filters - to be overridden by subclasses if needed"""
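The inline defaults that the removed code applied (filename and modelname on, tags off, recursive on) presumably now live behind SearchStrategy.normalize_options in the model_query module, which is not shown in this diff. A sketch of what that normalization might look like, stated as an assumption rather than the actual implementation:

# Hypothetical sketch of option normalization, mirroring the defaults the
# removed inline code used; the real logic lives in model_query (not shown).
DEFAULT_SEARCH_OPTIONS = {
    "filename": True,
    "modelname": True,
    "tags": False,
    "recursive": True,
}

def normalize_options(options: dict | None) -> dict:
    merged = dict(DEFAULT_SEARCH_OPTIONS)
    if options:
        merged.update(options)
    return merged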
@@ -284,6 +197,18 @@ class BaseModelService(ABC):
         """Get model root directories"""
         return self.scanner.get_model_roots()

+    def filter_civitai_data(self, data: Dict, minimal: bool = False) -> Dict:
+        """Filter relevant fields from CivitAI data"""
+        if not data:
+            return {}
+
+        fields = ["id", "modelId", "name", "trainedWords"] if minimal else [
+            "id", "modelId", "name", "createdAt", "updatedAt",
+            "publishedAt", "trainedWords", "baseModel", "description",
+            "model", "images", "customImages", "creator"
+        ]
+        return {k: data[k] for k in fields if k in data}
+
     async def get_folder_tree(self, model_root: str) -> Dict:
         """Get hierarchical folder tree for a specific model root"""
         cache = await self.scanner.get_cached_data()
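A quick illustration of the two projection modes of filter_civitai_data; the sample payload is made up, and only whitelisted keys survive in either mode:

# Invented payload for illustration.
civitai_payload = {
    "id": 1, "modelId": 2, "name": "Example", "trainedWords": ["tag"],
    "downloadUrl": "https://example.invalid", "baseModel": "SD 1.5",
}

service.filter_civitai_data(civitai_payload, minimal=True)
# -> {'id': 1, 'modelId': 2, 'name': 'Example', 'trainedWords': ['tag']}

service.filter_civitai_data(civitai_payload)
# -> additionally keeps 'baseModel'; 'downloadUrl' is dropped in both modes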
@@ -389,24 +314,24 @@ class BaseModelService(ABC):
         return {'civitai_url': None, 'model_id': None, 'version_id': None}

     async def get_model_metadata(self, file_path: str) -> Optional[Dict]:
-        """Get filtered CivitAI metadata for a model by file path"""
-        cache = await self.scanner.get_cached_data()
-
-        for model in cache.raw_data:
-            if model.get('file_path') == file_path:
-                return ModelRouteUtils.filter_civitai_data(model.get("civitai", {}))
-
-        return None
+        """Load full metadata for a single model.
+
+        Listing/search endpoints return lightweight cache entries; this method performs
+        a lazy read of the on-disk metadata snapshot when callers need full detail.
+        """
+        metadata, should_skip = await MetadataManager.load_metadata(file_path, self.metadata_class)
+        if should_skip or metadata is None:
+            return None
+        return self.filter_civitai_data(metadata.to_dict().get("civitai", {}))

     async def get_model_description(self, file_path: str) -> Optional[str]:
-        """Get model description by file path"""
-        cache = await self.scanner.get_cached_data()
-
-        for model in cache.raw_data:
-            if model.get('file_path') == file_path:
-                return model.get('modelDescription', '')
-
-        return None
+        """Return the stored modelDescription field for a model."""
+        metadata, should_skip = await MetadataManager.load_metadata(file_path, self.metadata_class)
+        if should_skip or metadata is None:
+            return None
+        return metadata.modelDescription or ''

     async def search_relative_paths(self, search_term: str, limit: int = 15) -> List[str]:
         """Search model relative file paths for autocomplete functionality"""
@@ -1,11 +1,10 @@
 import os
 import logging
-from typing import Dict, List, Optional
+from typing import Dict

 from .base_model_service import BaseModelService
 from ..utils.models import CheckpointMetadata
 from ..config import config
-from ..utils.routes_common import ModelRouteUtils

 logger = logging.getLogger(__name__)

@@ -38,7 +37,7 @@ class CheckpointService(BaseModelService):
             "notes": checkpoint_data.get("notes", ""),
             "model_type": checkpoint_data.get("model_type", "checkpoint"),
             "favorite": checkpoint_data.get("favorite", False),
-            "civitai": ModelRouteUtils.filter_civitai_data(checkpoint_data.get("civitai", {}), minimal=True)
+            "civitai": self.filter_civitai_data(checkpoint_data.get("civitai", {}), minimal=True)
         }

     def find_duplicate_hashes(self) -> Dict:
@@ -1,5 +1,5 @@
-from datetime import datetime
 import os
+import copy
 import logging
 import asyncio
 from typing import Optional, Dict, Tuple, List
@@ -32,6 +32,24 @@ class CivitaiClient:
         self._initialized = True
         self.base_url = "https://civitai.com/api/v1"

+    @staticmethod
+    def _remove_comfy_metadata(model_version: Optional[Dict]) -> None:
+        """Remove Comfy-specific metadata from model version images."""
+        if not isinstance(model_version, dict):
+            return
+
+        images = model_version.get("images")
+        if not isinstance(images, list):
+            return
+
+        for image in images:
+            if not isinstance(image, dict):
+                continue
+
+            meta = image.get("meta")
+            if isinstance(meta, dict) and "comfy" in meta:
+                meta.pop("comfy", None)
+
     async def download_file(self, url: str, save_dir: str, default_filename: str, progress_callback=None) -> Tuple[bool, str]:
         """Download file with resumable downloads and retry mechanism
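Effect of the scrub on a representative payload (values invented; the "comfy" key presumably carries the embedded ComfyUI workflow graph, which can be large):

# Invented payload: only the 'comfy' key is stripped from each image's
# meta dict; everything else is left intact.
version = {
    "images": [
        {"url": "img.png", "meta": {"prompt": "a cat", "comfy": {"nodes": []}}},
    ]
}
CivitaiClient._remove_comfy_metadata(version)
assert version["images"][0]["meta"] == {"prompt": "a cat"}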
@@ -59,17 +77,17 @@ class CivitaiClient:

         return success, result

-    async def get_model_by_hash(self, model_hash: str) -> Optional[Dict]:
+    async def get_model_by_hash(self, model_hash: str) -> Tuple[Optional[Dict], Optional[str]]:
         try:
             downloader = await get_downloader()
-            success, version = await downloader.make_request(
+            success, result = await downloader.make_request(
                 'GET',
                 f"{self.base_url}/model-versions/by-hash/{model_hash}",
                 use_auth=True
             )
             if success:
                 # Get model ID from version data
-                model_id = version.get('modelId')
+                model_id = result.get('modelId')
                 if model_id:
                     # Fetch additional model metadata
                     success_model, data = await downloader.make_request(
@@ -79,17 +97,25 @@ class CivitaiClient:
                     )
                     if success_model:
                         # Enrich version_info with model data
-                        version['model']['description'] = data.get("description")
-                        version['model']['tags'] = data.get("tags", [])
+                        result['model']['description'] = data.get("description")
+                        result['model']['tags'] = data.get("tags", [])

                         # Add creator from model data
-                        version['creator'] = data.get("creator")
+                        result['creator'] = data.get("creator")

-            return version
-            return None
+                self._remove_comfy_metadata(result)
+                return result, None
+
+            # Handle specific error cases
+            if "not found" in str(result):
+                return None, "Model not found"
+
+            # Other error cases
+            logger.error(f"Failed to fetch model info for {model_hash[:10]}: {result}")
+            return None, str(result)
         except Exception as e:
             logger.error(f"API Error: {str(e)}")
-            return None
+            return None, str(e)

     async def download_preview_image(self, image_url: str, save_path: str):
         try:
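Callers of get_model_by_hash now unpack a (version, error) pair instead of testing a bare Optional; a sketch of the calling pattern (the surrounding caller code is illustrative, not from the diff):

# Sketch of the new calling convention.
version, error = await client.get_model_by_hash(sha256_hash)
if version is None:
    logger.warning("CivitAI lookup failed: %s", error)
else:
    process(version)  # hypothetical downstream handler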
@@ -122,7 +148,8 @@ class CivitaiClient:
             # Also return model type along with versions
             return {
                 'modelVersions': result.get('modelVersions', []),
-                'type': result.get('type', '')
+                'type': result.get('type', ''),
+                'name': result.get('name', '')
             }
             return None
         except Exception as e:
@@ -169,7 +196,8 @@ class CivitaiClient:
                 version['model']['description'] = model_data.get("description")
                 version['model']['tags'] = model_data.get("tags", [])
                 version['creator'] = model_data.get("creator")

+            self._remove_comfy_metadata(version)
             return version

         # Case 2: model_id is provided (with or without version_id)
@@ -182,31 +210,77 @@ class CivitaiClient:
             )
             if not success:
                 return None

             model_versions = data.get('modelVersions', [])
+            if not model_versions:
+                logger.warning(f"No model versions found for model {model_id}")
+                return None

-            # Step 2: Determine the version_id to use
-            target_version_id = version_id
-            if target_version_id is None:
-                target_version_id = model_versions[0].get('id')
+            # Step 2: Determine the target version entry to use
+            target_version = None
+            if version_id is not None:
+                target_version = next(
+                    (item for item in model_versions if item.get('id') == version_id),
+                    None
+                )
+                if target_version is None:
+                    logger.warning(
+                        f"Version {version_id} not found for model {model_id}, defaulting to first version"
+                    )
+            if target_version is None:
+                target_version = model_versions[0]

-            # Step 3: Get detailed version info using the version_id
-            success, version = await downloader.make_request(
-                'GET',
-                f"{self.base_url}/model-versions/{target_version_id}",
-                use_auth=True
-            )
-            if not success:
-                return None
+            target_version_id = target_version.get('id')
+
+            # Step 3: Get detailed version info using the SHA256 hash
+            model_hash = None
+            for file_info in target_version.get('files', []):
+                if file_info.get('type') == 'Model' and file_info.get('primary'):
+                    model_hash = file_info.get('hashes', {}).get('SHA256')
+                    if model_hash:
+                        break
+
+            version = None
+            if model_hash:
+                success, version = await downloader.make_request(
+                    'GET',
+                    f"{self.base_url}/model-versions/by-hash/{model_hash}",
+                    use_auth=True
+                )
+                if not success:
+                    logger.warning(
+                        f"Failed to fetch version by hash for model {model_id} version {target_version_id}: {version}"
+                    )
+                    version = None
+            else:
+                logger.warning(
+                    f"No primary model hash found for model {model_id} version {target_version_id}"
+                )
+
+            if version is None:
+                version = copy.deepcopy(target_version)
+                version.pop('index', None)
+                version['modelId'] = model_id
+                version['model'] = {
+                    'name': data.get('name'),
+                    'type': data.get('type'),
+                    'nsfw': data.get('nsfw'),
+                    'poi': data.get('poi')
+                }

             # Step 4: Enrich version_info with model data
             # Add description and tags from model data
-            version['model']['description'] = data.get("description")
-            version['model']['tags'] = data.get("tags", [])
+            model_info = version.get('model')
+            if not isinstance(model_info, dict):
+                model_info = {}
+                version['model'] = model_info
+            model_info['description'] = data.get("description")
+            model_info['tags'] = data.get("tags", [])

             # Add creator from model data
             version['creator'] = data.get("creator")

+            self._remove_comfy_metadata(version)
             return version

         # Case 3: Neither model_id nor version_id provided
@@ -242,11 +316,12 @@ class CivitaiClient:

             if success:
                 logger.debug(f"Successfully fetched model version info for: {version_id}")
+                self._remove_comfy_metadata(result)
                 return result, None

             # Handle specific error cases
-            if "404" in str(result):
-                error_msg = f"Model not found (status 404)"
+            if "not found" in str(result):
+                error_msg = "Model not found"
                 logger.warning(f"Model version not found: {version_id} - {error_msg}")
                 return None, error_msg
@@ -258,59 +333,6 @@ class CivitaiClient:
             logger.error(error_msg)
             return None, error_msg

-    async def get_model_metadata(self, model_id: str) -> Tuple[Optional[Dict], int]:
-        """Fetch model metadata (description, tags, and creator info) from Civitai API
-
-        Args:
-            model_id: The Civitai model ID
-
-        Returns:
-            Tuple[Optional[Dict], int]: A tuple containing:
-                - A dictionary with model metadata or None if not found
-                - The HTTP status code from the request (0 for exceptions)
-        """
-        try:
-            downloader = await get_downloader()
-            url = f"{self.base_url}/models/{model_id}"
-
-            success, result = await downloader.make_request(
-                'GET',
-                url,
-                use_auth=True
-            )
-
-            if not success:
-                # Try to extract status code from error message
-                status_code = 0
-                if "404" in str(result):
-                    status_code = 404
-                elif "401" in str(result):
-                    status_code = 401
-                elif "403" in str(result):
-                    status_code = 403
-                logger.warning(f"Failed to fetch model metadata: {result}")
-                return None, status_code
-
-            # Extract relevant metadata
-            metadata = {
-                "description": result.get("description") or "No model description available",
-                "tags": result.get("tags", []),
-                "creator": {
-                    "username": result.get("creator", {}).get("username"),
-                    "image": result.get("creator", {}).get("image")
-                }
-            }
-
-            if metadata["description"] or metadata["tags"] or metadata["creator"]["username"]:
-                return metadata, 200
-            else:
-                logger.warning(f"No metadata found for model {model_id}")
-                return None, 200
-
-        except Exception as e:
-            logger.error(f"Error fetching model metadata: {e}", exc_info=True)
-            return None, 0
-
     async def get_image_info(self, image_id: str) -> Optional[Dict]:
         """Fetch image information from Civitai API
py/services/download_coordinator.py (new file, 100 lines)
@@ -0,0 +1,100 @@
"""Service wrapper for coordinating download lifecycle events."""

from __future__ import annotations

import logging
from typing import Any, Awaitable, Callable, Dict, Optional


logger = logging.getLogger(__name__)


class DownloadCoordinator:
    """Manage download scheduling, cancellation and introspection."""

    def __init__(
        self,
        *,
        ws_manager,
        download_manager_factory: Callable[[], Awaitable],
    ) -> None:
        self._ws_manager = ws_manager
        self._download_manager_factory = download_manager_factory

    async def schedule_download(self, payload: Dict[str, Any]) -> Dict[str, Any]:
        """Schedule a download using the provided payload."""

        download_manager = await self._download_manager_factory()

        download_id = payload.get("download_id") or self._ws_manager.generate_download_id()
        payload.setdefault("download_id", download_id)

        async def progress_callback(progress: Any) -> None:
            await self._ws_manager.broadcast_download_progress(
                download_id,
                {
                    "status": "progress",
                    "progress": progress,
                    "download_id": download_id,
                },
            )

        model_id = self._parse_optional_int(payload.get("model_id"), "model_id")
        model_version_id = self._parse_optional_int(
            payload.get("model_version_id"), "model_version_id"
        )

        if model_id is None and model_version_id is None:
            raise ValueError(
                "Missing required parameter: Please provide either 'model_id' or 'model_version_id'"
            )

        result = await download_manager.download_from_civitai(
            model_id=model_id,
            model_version_id=model_version_id,
            save_dir=payload.get("model_root"),
            relative_path=payload.get("relative_path", ""),
            use_default_paths=payload.get("use_default_paths", False),
            progress_callback=progress_callback,
            download_id=download_id,
            source=payload.get("source"),
        )

        result["download_id"] = download_id
        return result

    async def cancel_download(self, download_id: str) -> Dict[str, Any]:
        """Cancel an active download and emit a broadcast event."""

        download_manager = await self._download_manager_factory()
        result = await download_manager.cancel_download(download_id)

        await self._ws_manager.broadcast_download_progress(
            download_id,
            {
                "status": "cancelled",
                "progress": 0,
                "download_id": download_id,
                "message": "Download cancelled by user",
            },
        )

        return result

    async def list_active_downloads(self) -> Dict[str, Any]:
        """Return the active download map from the underlying manager."""

        download_manager = await self._download_manager_factory()
        return await download_manager.get_active_downloads()

    def _parse_optional_int(self, value: Any, field: str) -> Optional[int]:
        """Parse an optional integer from user input."""

        if value is None or value == "":
            return None

        try:
            return int(value)
        except (TypeError, ValueError) as exc:
            raise ValueError(f"Invalid {field}: Must be an integer") from exc
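A hypothetical wiring of the coordinator; the ws_manager instance and the factory below are stand-ins for the real service objects, and get_download_manager is an assumed ServiceRegistry accessor:

# Hypothetical wiring: the route layer constructs the coordinator with the
# websocket manager and a factory returning the DownloadManager.
async def download_manager_factory():
    return await ServiceRegistry.get_download_manager()  # assumed accessor

coordinator = DownloadCoordinator(
    ws_manager=ws_manager,
    download_manager_factory=download_manager_factory,
)

result = await coordinator.schedule_download({"model_version_id": "12345"})
print(result["download_id"])  # always present in the returned mapping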
@@ -3,7 +3,7 @@ import os
 import asyncio
 from collections import OrderedDict
 import uuid
-from typing import Dict
+from typing import Dict, List
 from ..utils.models import LoraMetadata, CheckpointMetadata, EmbeddingMetadata
 from ..utils.constants import CARD_PREVIEW_WIDTH, VALID_LORA_TYPES, CIVITAI_MODEL_TAGS
 from ..utils.exif_utils import ExifUtils
@@ -294,7 +294,18 @@ class DownloadManager:
         file_info = next((f for f in version_info.get('files', []) if f.get('primary')), None)
         if not file_info:
             return {'success': False, 'error': 'No primary file found in metadata'}
-        if not file_info.get('downloadUrl'):
+        mirrors = file_info.get('mirrors') or []
+        download_urls = []
+        if mirrors:
+            for mirror in mirrors:
+                if mirror.get('deletedAt') is None and mirror.get('url'):
+                    download_urls.append(mirror['url'])
+        else:
+            download_url = file_info.get('downloadUrl')
+            if download_url:
+                download_urls.append(download_url)
+
+        if not download_urls:
             return {'success': False, 'error': 'No download URL found for primary file'}

         # 3. Prepare download
@@ -314,7 +325,7 @@ class DownloadManager:

         # 6. Start download process
         result = await self._execute_download(
-            download_url=file_info.get('downloadUrl', ''),
+            download_urls=download_urls,
             save_dir=save_dir,
             metadata=metadata,
             version_info=version_info,
@@ -388,11 +399,14 @@ class DownloadManager:
         formatted_path = formatted_path.replace('{base_model}', mapped_base_model)
         formatted_path = formatted_path.replace('{first_tag}', first_tag)
         formatted_path = formatted_path.replace('{author}', author)
+
+        if model_type == 'embedding':
+            formatted_path = formatted_path.replace(' ', '_')
+
         return formatted_path

-    async def _execute_download(self, download_url: str, save_dir: str,
+    async def _execute_download(self, download_urls: List[str], save_dir: str,
                                 metadata, version_info: Dict,
                                 relative_path: str, progress_callback=None,
                                 model_type: str = "lora", download_id: str = None) -> Dict:
         """Execute the actual download process including preview images and model files"""
@@ -503,33 +517,44 @@ class DownloadManager:

         # Download model file with progress tracking using downloader
         downloader = await get_downloader()
-        # Determine if the download URL is from Civitai
-        use_auth = download_url.startswith("https://civitai.com/api/download/")
-        success, result = await downloader.download_file(
-            download_url,
-            save_path,  # Use full path instead of separate dir and filename
-            progress_callback=lambda p: self._handle_download_progress(p, progress_callback),
-            use_auth=use_auth  # Only use authentication for Civitai downloads
-        )
+        last_error = None
+        for download_url in download_urls:
+            use_auth = download_url.startswith("https://civitai.com/api/download/")
+            success, result = await downloader.download_file(
+                download_url,
+                save_path,  # Use full path instead of separate dir and filename
+                progress_callback=lambda p: self._handle_download_progress(p, progress_callback),
+                use_auth=use_auth  # Only use authentication for Civitai downloads
+            )

-        if not success:
+            if success:
+                break
+
+            last_error = result
+            if os.path.exists(save_path):
+                try:
+                    os.remove(save_path)
+                except Exception as e:
+                    logger.warning(f"Failed to remove incomplete file {save_path}: {e}")
+        else:
             # Clean up files on failure, but preserve .part file for resume
             cleanup_files = [metadata_path]
-            if metadata.preview_url and os.path.exists(metadata.preview_url):
-                cleanup_files.append(metadata.preview_url)
+            preview_path_value = getattr(metadata, 'preview_url', None)
+            if preview_path_value and os.path.exists(preview_path_value):
+                cleanup_files.append(preview_path_value)
+
             for path in cleanup_files:
                 if path and os.path.exists(path):
                     try:
                         os.remove(path)
                     except Exception as e:
                         logger.warning(f"Failed to cleanup file {path}: {e}")

             # Log but don't remove .part file to allow resume
             if os.path.exists(part_path):
                 logger.info(f"Preserving partial download for resume: {part_path}")

-            return {'success': False, 'error': result}
+            return {'success': False, 'error': last_error or 'Failed to download file'}

         # 4. Update file information (size and modified time)
         metadata.update_file_info(save_path)
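The failure path above deletes the final save_path but deliberately preserves the .part file, which is what makes the resumable downloads promised by download_file's docstring possible. A sketch of how a .part file typically enables resumption over HTTP; the real logic lives in the Downloader service (not shown) and the names here are illustrative:

# Sketch: send a Range header for the bytes already on disk, then append.
import os

import aiohttp

async def resume_download(session: aiohttp.ClientSession, url: str, part_path: str) -> None:
    offset = os.path.getsize(part_path) if os.path.exists(part_path) else 0
    headers = {"Range": f"bytes={offset}-"} if offset else {}
    async with session.get(url, headers=headers) as resp:
        mode = "ab" if resp.status == 206 else "wb"  # 206 = server honored the range
        with open(part_path, mode) as fh:
            async for chunk in resp.content.iter_chunked(1 << 16):
                fh.write(chunk)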
@@ -1,11 +1,10 @@
 import os
 import logging
-from typing import Dict, List, Optional
+from typing import Dict

 from .base_model_service import BaseModelService
 from ..utils.models import EmbeddingMetadata
 from ..config import config
-from ..utils.routes_common import ModelRouteUtils

 logger = logging.getLogger(__name__)

@@ -38,7 +37,7 @@ class EmbeddingService(BaseModelService):
             "notes": embedding_data.get("notes", ""),
             "model_type": embedding_data.get("model_type", "embedding"),
             "favorite": embedding_data.get("favorite", False),
-            "civitai": self.filter_civitai_data(embedding_data.get("civitai", {}), minimal=True)
+            "civitai": self.filter_civitai_data(embedding_data.get("civitai", {}), minimal=True)
         }

     def find_duplicate_hashes(self) -> Dict:
py/services/example_images_cleanup_service.py (new file, 296 lines)
@@ -0,0 +1,296 @@
|
|||||||
|
"""Service for cleaning up example image folders."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Dict, List, Tuple
|
||||||
|
|
||||||
|
from .service_registry import ServiceRegistry
|
||||||
|
from .settings_manager import settings
|
||||||
|
from ..utils.example_images_paths import iter_library_roots
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(slots=True)
|
||||||
|
class CleanupResult:
|
||||||
|
"""Structured result returned from cleanup operations."""
|
||||||
|
|
||||||
|
success: bool
|
||||||
|
checked_folders: int
|
||||||
|
moved_empty_folders: int
|
||||||
|
moved_orphaned_folders: int
|
||||||
|
skipped_non_hash: int
|
||||||
|
move_failures: int
|
||||||
|
errors: List[str]
|
||||||
|
deleted_root: str | None
|
||||||
|
partial_success: bool
|
||||||
|
|
||||||
|
def to_dict(self) -> Dict[str, object]:
|
||||||
|
"""Convert the dataclass to a serialisable dictionary."""
|
||||||
|
|
||||||
|
data = {
|
||||||
|
"success": self.success,
|
||||||
|
"checked_folders": self.checked_folders,
|
||||||
|
"moved_empty_folders": self.moved_empty_folders,
|
||||||
|
"moved_orphaned_folders": self.moved_orphaned_folders,
|
||||||
|
"moved_total": self.moved_empty_folders + self.moved_orphaned_folders,
|
||||||
|
"skipped_non_hash": self.skipped_non_hash,
|
||||||
|
"move_failures": self.move_failures,
|
||||||
|
"errors": self.errors,
|
||||||
|
"deleted_root": self.deleted_root,
|
||||||
|
"partial_success": self.partial_success,
|
||||||
|
}
|
||||||
|
|
||||||
|
return data
|
||||||
|
|
||||||
|
|
||||||
|
class ExampleImagesCleanupService:
|
||||||
|
"""Encapsulates logic for cleaning example image folders."""
|
||||||
|
|
||||||
|
DELETED_FOLDER_NAME = "_deleted"
|
||||||
|
|
||||||
|
def __init__(self, deleted_folder_name: str | None = None) -> None:
|
||||||
|
self._deleted_folder_name = deleted_folder_name or self.DELETED_FOLDER_NAME
|
||||||
|
|
||||||
|
async def cleanup_example_image_folders(self) -> Dict[str, object]:
|
||||||
|
"""Clean empty or orphaned example image folders by moving them under a deleted bucket."""
|
||||||
|
|
||||||
|
example_images_path = settings.get("example_images_path")
|
||||||
|
if not example_images_path:
|
||||||
|
logger.debug("Cleanup skipped: example images path not configured")
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"error": "Example images path is not configured.",
|
||||||
|
"error_code": "path_not_configured",
|
||||||
|
}
|
||||||
|
|
||||||
|
base_root = Path(example_images_path)
|
||||||
|
if not base_root.exists():
|
||||||
|
logger.debug("Cleanup skipped: example images path missing -> %s", base_root)
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"error": "Example images path does not exist.",
|
||||||
|
"error_code": "path_not_found",
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
lora_scanner = await ServiceRegistry.get_lora_scanner()
|
||||||
|
checkpoint_scanner = await ServiceRegistry.get_checkpoint_scanner()
|
||||||
|
embedding_scanner = await ServiceRegistry.get_embedding_scanner()
|
||||||
|
except Exception as exc: # pragma: no cover - defensive guard
|
||||||
|
logger.error("Failed to acquire scanners for cleanup: %s", exc, exc_info=True)
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"error": f"Failed to load model scanners: {exc}",
|
||||||
|
"error_code": "scanner_initialization_failed",
|
||||||
|
}
|
||||||
|
|
||||||
|
checked_folders = 0
|
||||||
|
moved_empty = 0
|
||||||
|
moved_orphaned = 0
|
||||||
|
skipped_non_hash = 0
|
||||||
|
move_failures = 0
|
||||||
|
errors: List[str] = []
|
||||||
|
|
||||||
|
resolved_base = base_root.resolve()
|
||||||
|
library_paths: List[Tuple[str, Path]] = []
|
||||||
|
processed_paths = {resolved_base}
|
||||||
|
|
||||||
|
for library_name, library_path in iter_library_roots():
|
||||||
|
if not library_path:
|
||||||
|
continue
|
||||||
|
library_root = Path(library_path)
|
||||||
|
try:
|
||||||
|
resolved = library_root.resolve()
|
||||||
|
except FileNotFoundError:
|
||||||
|
continue
|
||||||
|
if resolved in processed_paths:
|
||||||
|
continue
|
||||||
|
if not library_root.exists():
|
||||||
|
logger.debug(
|
||||||
|
"Skipping cleanup for library '%s': folder missing (%s)",
|
||||||
|
library_name,
|
||||||
|
library_root,
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
processed_paths.add(resolved)
|
||||||
|
library_paths.append((library_name, library_root))
|
||||||
|
|
||||||
|
deleted_roots: List[Path] = []
|
||||||
|
|
||||||
|
# Build list of (label, root) pairs including the base root for legacy layouts
|
||||||
|
cleanup_targets: List[Tuple[str, Path]] = [("__base__", base_root)] + library_paths
|
||||||
|
|
||||||
|
library_root_set = {root.resolve() for _, root in library_paths}
|
||||||
|
|
||||||
|
for label, root_path in cleanup_targets:
|
||||||
|
deleted_bucket = root_path / self._deleted_folder_name
|
||||||
|
deleted_bucket.mkdir(exist_ok=True)
|
||||||
|
deleted_roots.append(deleted_bucket)
|
||||||
|
|
||||||
|
for entry in os.scandir(root_path):
|
||||||
|
if not entry.is_dir(follow_symlinks=False):
|
||||||
|
continue
|
||||||
|
|
||||||
|
if entry.name == self._deleted_folder_name:
|
||||||
|
continue
|
||||||
|
|
||||||
|
entry_path = Path(entry.path)
|
||||||
|
|
||||||
|
if label == "__base__":
|
||||||
|
try:
|
||||||
|
resolved_entry = entry_path.resolve()
|
||||||
|
except FileNotFoundError:
|
||||||
|
continue
|
||||||
|
if resolved_entry in library_root_set:
|
||||||
|
# Skip library-specific folders tracked separately
|
||||||
|
continue
|
||||||
|
|
||||||
|
checked_folders += 1
|
||||||
|
|
||||||
|
try:
|
||||||
|
if self._is_folder_empty(entry_path):
|
||||||
|
if await self._remove_empty_folder(entry_path):
|
||||||
|
moved_empty += 1
|
||||||
|
else:
|
||||||
|
move_failures += 1
|
||||||
|
continue
|
||||||
|
|
||||||
|
if not self._is_hash_folder(entry.name):
|
||||||
|
skipped_non_hash += 1
|
||||||
|
continue
|
||||||
|
|
||||||
|
hash_exists = (
|
||||||
|
lora_scanner.has_hash(entry.name)
|
||||||
|
or checkpoint_scanner.has_hash(entry.name)
|
||||||
|
or embedding_scanner.has_hash(entry.name)
|
||||||
|
)
|
||||||
|
|
||||||
|
if not hash_exists:
|
||||||
|
if await self._move_folder(entry_path, deleted_bucket):
|
||||||
|
moved_orphaned += 1
|
||||||
|
else:
|
||||||
|
move_failures += 1
|
||||||
|
|
||||||
|
except Exception as exc: # pragma: no cover - filesystem guard
|
||||||
|
move_failures += 1
|
||||||
|
error_message = f"{entry.name}: {exc}"
|
||||||
|
errors.append(error_message)
|
||||||
|
logger.error(
|
||||||
|
"Error processing example images folder %s: %s",
|
||||||
|
entry_path,
|
||||||
|
exc,
|
||||||
|
exc_info=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
partial_success = move_failures > 0 and (moved_empty > 0 or moved_orphaned > 0)
|
||||||
|
success = move_failures == 0 and not errors
|
||||||
|
|
||||||
|
result = CleanupResult(
|
||||||
|
success=success,
|
||||||
|
checked_folders=checked_folders,
|
||||||
|
moved_empty_folders=moved_empty,
|
||||||
|
moved_orphaned_folders=moved_orphaned,
|
||||||
|
skipped_non_hash=skipped_non_hash,
|
||||||
|
move_failures=move_failures,
|
||||||
|
errors=errors,
|
||||||
|
deleted_root=str(deleted_roots[0]) if deleted_roots else None,
|
||||||
|
partial_success=partial_success,
|
||||||
|
)
|
||||||
|
|
||||||
|
summary = result.to_dict()
|
||||||
|
summary["deleted_roots"] = [str(path) for path in deleted_roots]
|
||||||
|
if success:
|
||||||
|
logger.info(
|
||||||
|
"Example images cleanup complete: checked=%s, moved_empty=%s, moved_orphaned=%s",
|
||||||
|
checked_folders,
|
||||||
|
moved_empty,
|
||||||
|
moved_orphaned,
|
||||||
|
)
|
||||||
|
elif partial_success:
|
||||||
|
logger.warning(
|
||||||
|
"Example images cleanup partially complete: moved=%s, failures=%s",
|
||||||
|
summary["moved_total"],
|
||||||
|
move_failures,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger.error(
|
||||||
|
"Example images cleanup failed: move_failures=%s, errors=%s",
|
||||||
|
move_failures,
|
||||||
|
errors,
|
||||||
|
)
|
||||||
|
|
||||||
|
return summary
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _is_folder_empty(folder_path: Path) -> bool:
|
||||||
|
try:
|
||||||
|
with os.scandir(folder_path) as iterator:
|
||||||
|
return not any(iterator)
|
||||||
|
except FileNotFoundError:
|
||||||
|
return True
|
||||||
|
except OSError as exc: # pragma: no cover - defensive guard
|
||||||
|
logger.debug("Failed to inspect folder %s: %s", folder_path, exc)
|
||||||
|
return False
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _is_hash_folder(name: str) -> bool:
|
||||||
|
if len(name) != 64:
|
||||||
|
return False
|
||||||
|
hex_chars = set("0123456789abcdefABCDEF")
|
||||||
|
return all(char in hex_chars for char in name)
|
||||||
|
|
||||||
|
async def _remove_empty_folder(self, folder_path: Path) -> bool:
|
||||||
|
loop = asyncio.get_running_loop()
|
||||||
|
|
||||||
|
try:
|
||||||
|
await loop.run_in_executor(
|
||||||
|
None,
|
||||||
|
shutil.rmtree,
|
||||||
|
str(folder_path),
|
||||||
|
)
|
||||||
|
logger.debug("Removed empty example images folder %s", folder_path)
|
||||||
|
return True
|
||||||
|
except Exception as exc: # pragma: no cover - filesystem guard
|
||||||
|
logger.error("Failed to remove empty example images folder %s: %s", folder_path, exc, exc_info=True)
|
||||||
|
return False
|
||||||
|
|
||||||
|
async def _move_folder(self, folder_path: Path, deleted_bucket: Path) -> bool:
|
||||||
|
destination = self._build_destination(folder_path.name, deleted_bucket)
|
||||||
|
loop = asyncio.get_running_loop()
|
||||||
|
|
||||||
|
try:
|
||||||
|
await loop.run_in_executor(
|
||||||
|
None,
|
||||||
|
shutil.move,
|
||||||
|
str(folder_path),
|
||||||
|
str(destination),
|
||||||
|
)
|
||||||
|
logger.debug("Moved example images folder %s -> %s", folder_path, destination)
|
||||||
|
return True
|
||||||
|
except Exception as exc: # pragma: no cover - filesystem guard
|
||||||
|
logger.error(
|
||||||
|
"Failed to move example images folder %s to %s: %s",
|
||||||
|
folder_path,
|
||||||
|
destination,
|
||||||
|
exc,
|
||||||
|
exc_info=True,
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
|
||||||
|
def _build_destination(self, folder_name: str, deleted_bucket: Path) -> Path:
|
||||||
|
destination = deleted_bucket / folder_name
|
||||||
|
suffix = 1
|
||||||
|
|
||||||
|
while destination.exists():
|
||||||
|
destination = deleted_bucket / f"{folder_name}_{suffix}"
|
||||||
|
suffix += 1
|
||||||
|
|
||||||
|
return destination
|
||||||
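A quick orientation sketch for the new service: folder names are only treated as model folders when they look like SHA256 hashes (64 hex characters), so the checks below hold by construction. Running the full cleanup needs the scanner registry and settings, so that call is left commented:

service = ExampleImagesCleanupService()

assert service._is_hash_folder("a" * 64)             # 64 hex chars: candidate model folder
assert not service._is_hash_folder("a" * 63)         # wrong length: counted as skipped_non_hash
assert not service._is_hash_folder("not-a-hash-64")  # non-hex characters

# import asyncio
# summary = asyncio.run(service.cleanup_example_image_folders())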
@@ -5,7 +5,6 @@ from typing import Dict, List, Optional
 from .base_model_service import BaseModelService
 from ..utils.models import LoraMetadata
 from ..config import config
-from ..utils.routes_common import ModelRouteUtils

 logger = logging.getLogger(__name__)

@@ -38,7 +37,7 @@ class LoraService(BaseModelService):
             "usage_tips": lora_data.get("usage_tips", ""),
             "notes": lora_data.get("notes", ""),
             "favorite": lora_data.get("favorite", False),
-            "civitai": ModelRouteUtils.filter_civitai_data(lora_data.get("civitai", {}), minimal=True)
+            "civitai": self.filter_civitai_data(lora_data.get("civitai", {}), minimal=True)
         }

     async def _apply_specific_filters(self, data: List[Dict], **kwargs) -> List[Dict]:
@@ -37,7 +37,7 @@ async def initialize_metadata_providers():
             sqlite_provider = SQLiteModelMetadataProvider(db_path)
             provider_manager.register_provider('sqlite', sqlite_provider)
             providers.append(('sqlite', sqlite_provider))
-            logger.info(f"SQLite metadata provider registered with database: {db_path}")
+            logger.debug(f"SQLite metadata provider registered with database: {db_path}")
         else:
             logger.warning("Metadata archive database is enabled but database file not found")
     except Exception as e:

@@ -72,7 +72,7 @@ async def initialize_metadata_providers():
     if ordered_providers:
         fallback_provider = FallbackMetadataProvider(ordered_providers)
         provider_manager.register_provider('fallback', fallback_provider, is_default=True)
-        logger.info(f"Fallback metadata provider registered with {len(ordered_providers)} providers, Civitai API first")
+        logger.debug(f"Fallback metadata provider registered with {len(ordered_providers)} providers, Civitai API first")
    elif len(providers) == 1:
        # Only one provider available, set it as default
        provider_name, provider = providers[0]
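The only substantive change in these two hunks is demoting routine provider-registration messages from info to debug, so normal startups log less noise while warnings are untouched. The pattern in isolation, with stand-in names:

import logging

logger = logging.getLogger("metadata_providers")

def register_provider(registry: dict, name: str, provider: object) -> None:
    registry[name] = provider
    # Routine bookkeeping belongs at DEBUG; real problems stay at WARNING or above.
    logger.debug("Metadata provider %r registered", name)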
py/services/metadata_sync_service.py (new file, 356 lines)
@@ -0,0 +1,356 @@
"""Services for synchronising metadata with remote providers."""

from __future__ import annotations

import json
import logging
import os
from datetime import datetime
from typing import Any, Awaitable, Callable, Dict, Iterable, Optional

from ..services.settings_manager import SettingsManager
from ..utils.model_utils import determine_base_model

logger = logging.getLogger(__name__)


class MetadataProviderProtocol:
    """Subset of metadata provider interface consumed by the sync service."""

    async def get_model_by_hash(self, sha256: str) -> tuple[Optional[Dict[str, Any]], Optional[str]]:
        ...

    async def get_model_version(
        self, model_id: int, model_version_id: Optional[int]
    ) -> Optional[Dict[str, Any]]:
        ...


class MetadataSyncService:
    """High level orchestration for metadata synchronisation flows."""

    def __init__(
        self,
        *,
        metadata_manager,
        preview_service,
        settings: SettingsManager,
        default_metadata_provider_factory: Callable[[], Awaitable[MetadataProviderProtocol]],
        metadata_provider_selector: Callable[[str], Awaitable[MetadataProviderProtocol]],
    ) -> None:
        self._metadata_manager = metadata_manager
        self._preview_service = preview_service
        self._settings = settings
        self._get_default_provider = default_metadata_provider_factory
        self._get_provider = metadata_provider_selector

    async def load_local_metadata(self, metadata_path: str) -> Dict[str, Any]:
        """Load metadata JSON from disk, returning an empty structure when missing."""

        if not os.path.exists(metadata_path):
            return {}

        try:
            with open(metadata_path, "r", encoding="utf-8") as handle:
                return json.load(handle)
        except Exception as exc:  # pragma: no cover - defensive logging
            logger.error("Error loading metadata from %s: %s", metadata_path, exc)
            return {}

    async def mark_not_found_on_civitai(
        self, metadata_path: str, local_metadata: Dict[str, Any]
    ) -> None:
        """Persist the not-found flag for a metadata payload."""

        local_metadata["from_civitai"] = False
        await self._metadata_manager.save_metadata(metadata_path, local_metadata)

    @staticmethod
    def is_civitai_api_metadata(meta: Dict[str, Any]) -> bool:
        """Determine if the metadata originated from the CivitAI public API."""

        if not isinstance(meta, dict):
            return False
        files = meta.get("files")
        images = meta.get("images")
        source = meta.get("source")
        return bool(files) and bool(images) and source != "archive_db"

    async def update_model_metadata(
        self,
        metadata_path: str,
        local_metadata: Dict[str, Any],
        civitai_metadata: Dict[str, Any],
        metadata_provider: Optional[MetadataProviderProtocol] = None,
    ) -> Dict[str, Any]:
        """Merge remote metadata into the local record and persist the result."""

        existing_civitai = local_metadata.get("civitai") or {}

        if (
            civitai_metadata.get("source") == "archive_db"
            and self.is_civitai_api_metadata(existing_civitai)
        ):
            logger.info(
                "Skip civitai update for %s (%s)",
                local_metadata.get("model_name", ""),
                existing_civitai.get("name", ""),
            )
        else:
            merged_civitai = existing_civitai.copy()
            merged_civitai.update(civitai_metadata)

            if civitai_metadata.get("source") == "archive_db":
                model_name = civitai_metadata.get("model", {}).get("name", "")
                version_name = civitai_metadata.get("name", "")
                logger.info(
                    "Recovered metadata from archive_db for deleted model: %s (%s)",
                    model_name,
                    version_name,
                )

            if "trainedWords" in existing_civitai:
                existing_trained = existing_civitai.get("trainedWords", [])
                new_trained = civitai_metadata.get("trainedWords", [])
                merged_trained = list(set(existing_trained + new_trained))
                merged_civitai["trainedWords"] = merged_trained

            local_metadata["civitai"] = merged_civitai

        if "model" in civitai_metadata and civitai_metadata["model"]:
            model_data = civitai_metadata["model"]

            if model_data.get("name"):
                local_metadata["model_name"] = model_data["name"]

            if not local_metadata.get("modelDescription") and model_data.get("description"):
                local_metadata["modelDescription"] = model_data["description"]

            if not local_metadata.get("tags") and model_data.get("tags"):
                local_metadata["tags"] = model_data["tags"]

            if model_data.get("creator") and not local_metadata.get("civitai", {}).get(
                "creator"
            ):
                local_metadata.setdefault("civitai", {})["creator"] = model_data["creator"]

        local_metadata["base_model"] = determine_base_model(
            civitai_metadata.get("baseModel")
        )

        await self._preview_service.ensure_preview_for_metadata(
            metadata_path, local_metadata, civitai_metadata.get("images", [])
        )

        await self._metadata_manager.save_metadata(metadata_path, local_metadata)
        return local_metadata

    async def fetch_and_update_model(
        self,
        *,
        sha256: str,
        file_path: str,
        model_data: Dict[str, Any],
        update_cache_func: Callable[[str, str, Dict[str, Any]], Awaitable[bool]],
    ) -> tuple[bool, Optional[str]]:
        """Fetch metadata for a model and update both disk and cache state."""

        if not isinstance(model_data, dict):
            error = f"Invalid model_data type: {type(model_data)}"
            logger.error(error)
            return False, error

        metadata_path = os.path.splitext(file_path)[0] + ".metadata.json"
        enable_archive = self._settings.get("enable_metadata_archive_db", False)

        try:
            if model_data.get("civitai_deleted") is True:
                if not enable_archive or model_data.get("db_checked") is True:
                    if not enable_archive:
                        error_msg = "CivitAI model is deleted and metadata archive DB is not enabled"
                    else:
                        error_msg = "CivitAI model is deleted and not found in metadata archive DB"
                    return (False, error_msg)
                metadata_provider = await self._get_provider("sqlite")
            else:
                metadata_provider = await self._get_default_provider()

            civitai_metadata, error = await metadata_provider.get_model_by_hash(sha256)
            if not civitai_metadata:
                if error == "Model not found":
                    model_data["from_civitai"] = False
                    model_data["civitai_deleted"] = True
                    model_data["db_checked"] = enable_archive
                    model_data["last_checked_at"] = datetime.now().timestamp()

                    data_to_save = model_data.copy()
                    data_to_save.pop("folder", None)
                    await self._metadata_manager.save_metadata(file_path, data_to_save)

                error_msg = (
                    f"Error fetching metadata: {error} (model_name={model_data.get('model_name', '')})"
                )
                logger.error(error_msg)
                return False, error_msg

            model_data["from_civitai"] = True
            model_data["civitai_deleted"] = civitai_metadata.get("source") == "archive_db"
            model_data["db_checked"] = enable_archive
            model_data["last_checked_at"] = datetime.now().timestamp()

            local_metadata = model_data.copy()
            local_metadata.pop("folder", None)

            await self.update_model_metadata(
                metadata_path,
                local_metadata,
                civitai_metadata,
                metadata_provider,
            )

            update_payload = {
                "model_name": local_metadata.get("model_name"),
                "preview_url": local_metadata.get("preview_url"),
                "civitai": local_metadata.get("civitai"),
            }
            model_data.update(update_payload)

            await update_cache_func(file_path, file_path, local_metadata)
            return True, None
        except KeyError as exc:
            error_msg = f"Error fetching metadata - Missing key: {exc} in model_data={model_data}"
            logger.error(error_msg)
            return False, error_msg
        except Exception as exc:  # pragma: no cover - error path
            error_msg = f"Error fetching metadata: {exc}"
            logger.error(error_msg, exc_info=True)
            return False, error_msg

    async def fetch_metadata_by_sha(
        self, sha256: str, metadata_provider: Optional[MetadataProviderProtocol] = None
    ) -> tuple[Optional[Dict[str, Any]], Optional[str]]:
        """Fetch metadata for a SHA256 hash from the configured provider."""

        provider = metadata_provider or await self._get_default_provider()
        return await provider.get_model_by_hash(sha256)

    async def relink_metadata(
        self,
        *,
        file_path: str,
        metadata: Dict[str, Any],
        model_id: int,
        model_version_id: Optional[int],
    ) -> Dict[str, Any]:
        """Relink a local metadata record to a specific CivitAI model version."""

        provider = await self._get_default_provider()
        civitai_metadata = await provider.get_model_version(model_id, model_version_id)
        if not civitai_metadata:
            raise ValueError(
                f"Model version not found on CivitAI for ID: {model_id}"
                + (f" with version: {model_version_id}" if model_version_id else "")
            )

        primary_model_file: Optional[Dict[str, Any]] = None
        for file_info in civitai_metadata.get("files", []):
            if file_info.get("primary", False) and file_info.get("type") == "Model":
                primary_model_file = file_info
                break

        if primary_model_file and primary_model_file.get("hashes", {}).get("SHA256"):
            metadata["sha256"] = primary_model_file["hashes"]["SHA256"].lower()

        metadata_path = os.path.splitext(file_path)[0] + ".metadata.json"
        await self.update_model_metadata(
            metadata_path,
            metadata,
            civitai_metadata,
            provider,
        )

        return metadata

    async def save_metadata_updates(
        self,
        *,
        file_path: str,
        updates: Dict[str, Any],
        metadata_loader: Callable[[str], Awaitable[Dict[str, Any]]],
        update_cache: Callable[[str, str, Dict[str, Any]], Awaitable[bool]],
    ) -> Dict[str, Any]:
        """Apply metadata updates and persist to disk and cache."""

        metadata_path = os.path.splitext(file_path)[0] + ".metadata.json"
        metadata = await metadata_loader(metadata_path)

        for key, value in updates.items():
            if isinstance(value, dict) and isinstance(metadata.get(key), dict):
                metadata[key].update(value)
            else:
                metadata[key] = value

        await self._metadata_manager.save_metadata(file_path, metadata)
        await update_cache(file_path, file_path, metadata)

        if "model_name" in updates:
            logger.debug("Metadata update touched model_name; cache resort required")

        return metadata

    async def verify_duplicate_hashes(
        self,
        *,
        file_paths: Iterable[str],
        metadata_loader: Callable[[str], Awaitable[Dict[str, Any]]],
        hash_calculator: Callable[[str], Awaitable[str]],
        update_cache: Callable[[str, str, Dict[str, Any]], Awaitable[bool]],
    ) -> Dict[str, Any]:
        """Verify a collection of files share the same SHA256 hash."""

        file_paths = list(file_paths)
        if not file_paths:
            raise ValueError("No file paths provided for verification")

        results = {
            "verified_as_duplicates": True,
            "mismatched_files": [],
            "new_hash_map": {},
        }

        expected_hash: Optional[str] = None
        first_metadata_path = os.path.splitext(file_paths[0])[0] + ".metadata.json"
        first_metadata = await metadata_loader(first_metadata_path)
        if first_metadata and "sha256" in first_metadata:
            expected_hash = first_metadata["sha256"].lower()

        for path in file_paths:
            if not os.path.exists(path):
                continue

            try:
                actual_hash = await hash_calculator(path)
                metadata_path = os.path.splitext(path)[0] + ".metadata.json"
                metadata = await metadata_loader(metadata_path)
                stored_hash = metadata.get("sha256", "").lower()

                if not expected_hash:
                    expected_hash = stored_hash

                if actual_hash != expected_hash:
                    results["verified_as_duplicates"] = False
                    results["mismatched_files"].append(path)
                    results["new_hash_map"][path] = actual_hash

                if actual_hash != stored_hash:
                    metadata["sha256"] = actual_hash
                    await self._metadata_manager.save_metadata(path, metadata)
                    await update_cache(path, path, metadata)
            except Exception as exc:  # pragma: no cover - defensive path
                logger.error("Error verifying hash for %s: %s", path, exc)
                results["mismatched_files"].append(path)
                results["new_hash_map"][path] = "error_calculating_hash"
                results["verified_as_duplicates"] = False

        return results
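One detail worth noting in update_model_metadata: trigger words from the existing record and the incoming payload are merged through a set, so duplicates collapse but ordering is not preserved. The merge in isolation:

existing = {"trainedWords": ["style_a", "style_b"]}
incoming = {"trainedWords": ["style_b", "style_c"]}

merged = list(set(existing["trainedWords"] + incoming["trainedWords"]))
print(sorted(merged))  # ['style_a', 'style_b', 'style_c'] -- sorted only for a stable display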
py/services/model_lifecycle_service.py (new file, 245 lines)
@@ -0,0 +1,245 @@
"""Service routines for model lifecycle mutations."""

from __future__ import annotations

import logging
import os
from typing import Awaitable, Callable, Dict, Iterable, List, Optional

from ..services.service_registry import ServiceRegistry
from ..utils.constants import PREVIEW_EXTENSIONS

logger = logging.getLogger(__name__)


async def delete_model_artifacts(target_dir: str, file_name: str) -> List[str]:
    """Delete the primary model artefacts within ``target_dir``."""

    patterns = [
        f"{file_name}.safetensors",
        f"{file_name}.metadata.json",
    ]
    for ext in PREVIEW_EXTENSIONS:
        patterns.append(f"{file_name}{ext}")

    deleted: List[str] = []
    main_file = patterns[0]
    main_path = os.path.join(target_dir, main_file).replace(os.sep, "/")

    if os.path.exists(main_path):
        os.remove(main_path)
        deleted.append(main_path)
    else:
        logger.warning("Model file not found: %s", main_file)

    for pattern in patterns[1:]:
        path = os.path.join(target_dir, pattern)
        if os.path.exists(path):
            try:
                os.remove(path)
                deleted.append(pattern)
            except Exception as exc:  # pragma: no cover - defensive path
                logger.warning("Failed to delete %s: %s", pattern, exc)

    return deleted


class ModelLifecycleService:
    """Co-ordinate destructive and mutating model operations."""

    def __init__(
        self,
        *,
        scanner,
        metadata_manager,
        metadata_loader: Callable[[str], Awaitable[Dict[str, object]]],
        recipe_scanner_factory: Callable[[], Awaitable] | None = None,
    ) -> None:
        self._scanner = scanner
        self._metadata_manager = metadata_manager
        self._metadata_loader = metadata_loader
        self._recipe_scanner_factory = (
            recipe_scanner_factory or ServiceRegistry.get_recipe_scanner
        )

    async def delete_model(self, file_path: str) -> Dict[str, object]:
        """Delete a model file and associated artefacts."""

        if not file_path:
            raise ValueError("Model path is required")

        target_dir = os.path.dirname(file_path)
        file_name = os.path.splitext(os.path.basename(file_path))[0]

        deleted_files = await delete_model_artifacts(target_dir, file_name)

        cache = await self._scanner.get_cached_data()
        cache.raw_data = [item for item in cache.raw_data if item["file_path"] != file_path]
        await cache.resort()

        if hasattr(self._scanner, "_hash_index") and self._scanner._hash_index:
            self._scanner._hash_index.remove_by_path(file_path)

        return {"success": True, "deleted_files": deleted_files}

    async def exclude_model(self, file_path: str) -> Dict[str, object]:
        """Mark a model as excluded and prune cache references."""

        if not file_path:
            raise ValueError("Model path is required")

        metadata_path = os.path.splitext(file_path)[0] + ".metadata.json"
        metadata = await self._metadata_loader(metadata_path)
        metadata["exclude"] = True

        await self._metadata_manager.save_metadata(file_path, metadata)

        cache = await self._scanner.get_cached_data()
        model_to_remove = next(
            (item for item in cache.raw_data if item["file_path"] == file_path),
            None,
        )

        if model_to_remove:
            for tag in model_to_remove.get("tags", []):
                if tag in getattr(self._scanner, "_tags_count", {}):
                    self._scanner._tags_count[tag] = max(
                        0, self._scanner._tags_count[tag] - 1
                    )
                    if self._scanner._tags_count[tag] == 0:
                        del self._scanner._tags_count[tag]

            if hasattr(self._scanner, "_hash_index") and self._scanner._hash_index:
                self._scanner._hash_index.remove_by_path(file_path)

            cache.raw_data = [
                item for item in cache.raw_data if item["file_path"] != file_path
            ]
            await cache.resort()

        excluded = getattr(self._scanner, "_excluded_models", None)
        if isinstance(excluded, list):
            excluded.append(file_path)

        message = f"Model {os.path.basename(file_path)} excluded"
        return {"success": True, "message": message}

    async def bulk_delete_models(self, file_paths: Iterable[str]) -> Dict[str, object]:
        """Delete a collection of models via the scanner bulk operation."""

        file_paths = list(file_paths)
        if not file_paths:
            raise ValueError("No file paths provided for deletion")

        return await self._scanner.bulk_delete_models(file_paths)

    async def rename_model(
        self, *, file_path: str, new_file_name: str
    ) -> Dict[str, object]:
        """Rename a model and its companion artefacts."""

        if not file_path or not new_file_name:
            raise ValueError("File path and new file name are required")

        invalid_chars = {"/", "\\", ":", "*", "?", '"', "<", ">", "|"}
        if any(char in new_file_name for char in invalid_chars):
            raise ValueError("Invalid characters in file name")

        target_dir = os.path.dirname(file_path)
        old_file_name = os.path.splitext(os.path.basename(file_path))[0]
        new_file_path = os.path.join(target_dir, f"{new_file_name}.safetensors").replace(
            os.sep, "/"
        )

        if os.path.exists(new_file_path):
            raise ValueError("A file with this name already exists")

        patterns = [
            f"{old_file_name}.safetensors",
            f"{old_file_name}.metadata.json",
            f"{old_file_name}.metadata.json.bak",
        ]
        for ext in PREVIEW_EXTENSIONS:
            patterns.append(f"{old_file_name}{ext}")

        existing_files: List[tuple[str, str]] = []
        for pattern in patterns:
            path = os.path.join(target_dir, pattern)
            if os.path.exists(path):
                existing_files.append((path, pattern))

        metadata_path = os.path.join(target_dir, f"{old_file_name}.metadata.json")
        metadata: Optional[Dict[str, object]] = None
        hash_value: Optional[str] = None

        if os.path.exists(metadata_path):
            metadata = await self._metadata_loader(metadata_path)
            hash_value = metadata.get("sha256") if isinstance(metadata, dict) else None

        renamed_files: List[str] = []
        new_metadata_path: Optional[str] = None
        new_preview: Optional[str] = None

        for old_path, pattern in existing_files:
            ext = self._get_multipart_ext(pattern)
            new_path = os.path.join(target_dir, f"{new_file_name}{ext}").replace(
                os.sep, "/"
            )
            os.rename(old_path, new_path)
            renamed_files.append(new_path)

            if ext == ".metadata.json":
                new_metadata_path = new_path

        if metadata and new_metadata_path:
            metadata["file_name"] = new_file_name
            metadata["file_path"] = new_file_path

            if metadata.get("preview_url"):
                old_preview = str(metadata["preview_url"])
                ext = self._get_multipart_ext(old_preview)
                new_preview = os.path.join(target_dir, f"{new_file_name}{ext}").replace(
                    os.sep, "/"
                )
                metadata["preview_url"] = new_preview

            await self._metadata_manager.save_metadata(new_file_path, metadata)

        if metadata:
            await self._scanner.update_single_model_cache(
                file_path, new_file_path, metadata
            )

        if hash_value and getattr(self._scanner, "model_type", "") == "lora":
            recipe_scanner = await self._recipe_scanner_factory()
            if recipe_scanner:
                try:
                    await recipe_scanner.update_lora_filename_by_hash(
                        hash_value, new_file_name
                    )
                except Exception as exc:  # pragma: no cover - defensive logging
                    logger.error(
                        "Error updating recipe references for %s: %s",
                        file_path,
                        exc,
                    )

        return {
            "success": True,
            "new_file_path": new_file_path,
            "new_preview_path": new_preview,
            "renamed_files": renamed_files,
            "reload_required": False,
        }

    @staticmethod
    def _get_multipart_ext(filename: str) -> str:
        """Return the extension for files with compound suffixes."""

        parts = filename.split(".")
        if len(parts) == 3:
            return "." + ".".join(parts[-2:])
        if len(parts) >= 4:
            return "." + ".".join(parts[-3:])
        return os.path.splitext(filename)[1]
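_get_multipart_ext exists because companion files use compound suffixes that a plain os.path.splitext would truncate. Its behaviour on the three shapes the rename path cares about:

print(ModelLifecycleService._get_multipart_ext("model.safetensors"))        # .safetensors
print(ModelLifecycleService._get_multipart_ext("model.metadata.json"))      # .metadata.json
print(ModelLifecycleService._get_multipart_ext("model.metadata.json.bak"))  # .metadata.json.bak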
@@ -1,19 +1,48 @@
|
|||||||
from abc import ABC, abstractmethod
|
from abc import ABC, abstractmethod
|
||||||
import json
|
import json
|
||||||
import aiosqlite
|
|
||||||
import logging
|
import logging
|
||||||
import aiohttp
|
from typing import Optional, Dict, Tuple, Any
|
||||||
from bs4 import BeautifulSoup
|
|
||||||
from typing import Optional, Dict, Tuple
|
|
||||||
from .downloader import get_downloader
|
from .downloader import get_downloader
|
||||||
|
|
||||||
|
try:
|
||||||
|
from bs4 import BeautifulSoup
|
||||||
|
except ImportError as exc:
|
||||||
|
BeautifulSoup = None # type: ignore[assignment]
|
||||||
|
_BS4_IMPORT_ERROR = exc
|
||||||
|
else:
|
||||||
|
_BS4_IMPORT_ERROR = None
|
||||||
|
|
||||||
|
try:
|
||||||
|
import aiosqlite
|
||||||
|
except ImportError as exc:
|
||||||
|
aiosqlite = None # type: ignore[assignment]
|
||||||
|
_AIOSQLITE_IMPORT_ERROR = exc
|
||||||
|
else:
|
||||||
|
_AIOSQLITE_IMPORT_ERROR = None
|
||||||
|
|
||||||
|
def _require_beautifulsoup() -> Any:
|
||||||
|
if BeautifulSoup is None:
|
||||||
|
raise RuntimeError(
|
||||||
|
"BeautifulSoup (bs4) is required for CivArchiveModelMetadataProvider. "
|
||||||
|
"Install it with 'pip install beautifulsoup4'."
|
||||||
|
) from _BS4_IMPORT_ERROR
|
||||||
|
return BeautifulSoup
|
||||||
|
|
||||||
|
def _require_aiosqlite() -> Any:
|
||||||
|
if aiosqlite is None:
|
||||||
|
raise RuntimeError(
|
||||||
|
"aiosqlite is required for SQLiteModelMetadataProvider. "
|
||||||
|
"Install it with 'pip install aiosqlite'."
|
||||||
|
) from _AIOSQLITE_IMPORT_ERROR
|
||||||
|
return aiosqlite
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
class ModelMetadataProvider(ABC):
|
class ModelMetadataProvider(ABC):
|
||||||
"""Base abstract class for all model metadata providers"""
|
"""Base abstract class for all model metadata providers"""
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
async def get_model_by_hash(self, model_hash: str) -> Optional[Dict]:
|
async def get_model_by_hash(self, model_hash: str) -> Tuple[Optional[Dict], Optional[str]]:
|
||||||
"""Find model by hash value"""
|
"""Find model by hash value"""
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@@ -31,11 +60,6 @@ class ModelMetadataProvider(ABC):
|
|||||||
async def get_model_version_info(self, version_id: str) -> Tuple[Optional[Dict], Optional[str]]:
|
async def get_model_version_info(self, version_id: str) -> Tuple[Optional[Dict], Optional[str]]:
|
||||||
"""Fetch model version metadata"""
|
"""Fetch model version metadata"""
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
async def get_model_metadata(self, model_id: str) -> Tuple[Optional[Dict], int]:
|
|
||||||
"""Fetch model metadata (description, tags, and creator info)"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
class CivitaiModelMetadataProvider(ModelMetadataProvider):
|
class CivitaiModelMetadataProvider(ModelMetadataProvider):
|
||||||
"""Provider that uses Civitai API for metadata"""
|
"""Provider that uses Civitai API for metadata"""
|
||||||
@@ -43,7 +67,7 @@ class CivitaiModelMetadataProvider(ModelMetadataProvider):
|
|||||||
def __init__(self, civitai_client):
|
def __init__(self, civitai_client):
|
||||||
self.client = civitai_client
|
self.client = civitai_client
|
||||||
|
|
||||||
async def get_model_by_hash(self, model_hash: str) -> Optional[Dict]:
|
async def get_model_by_hash(self, model_hash: str) -> Tuple[Optional[Dict], Optional[str]]:
|
||||||
return await self.client.get_model_by_hash(model_hash)
|
return await self.client.get_model_by_hash(model_hash)
|
||||||
|
|
||||||
async def get_model_versions(self, model_id: str) -> Optional[Dict]:
|
async def get_model_versions(self, model_id: str) -> Optional[Dict]:
|
||||||
@@ -54,16 +78,13 @@ class CivitaiModelMetadataProvider(ModelMetadataProvider):
|
|||||||
|
|
||||||
async def get_model_version_info(self, version_id: str) -> Tuple[Optional[Dict], Optional[str]]:
|
async def get_model_version_info(self, version_id: str) -> Tuple[Optional[Dict], Optional[str]]:
|
||||||
return await self.client.get_model_version_info(version_id)
|
return await self.client.get_model_version_info(version_id)
|
||||||
|
|
||||||
async def get_model_metadata(self, model_id: str) -> Tuple[Optional[Dict], int]:
|
|
||||||
return await self.client.get_model_metadata(model_id)
|
|
||||||
|
|
||||||
class CivArchiveModelMetadataProvider(ModelMetadataProvider):
|
class CivArchiveModelMetadataProvider(ModelMetadataProvider):
|
||||||
"""Provider that uses CivArchive HTML page parsing for metadata"""
|
"""Provider that uses CivArchive HTML page parsing for metadata"""
|
||||||
|
|
||||||
async def get_model_by_hash(self, model_hash: str) -> Optional[Dict]:
|
async def get_model_by_hash(self, model_hash: str) -> Tuple[Optional[Dict], Optional[str]]:
|
||||||
"""Not supported by CivArchive provider"""
|
"""Not supported by CivArchive provider"""
|
||||||
return None
|
return None, "CivArchive provider does not support hash lookup"
|
||||||
|
|
||||||
async def get_model_versions(self, model_id: str) -> Optional[Dict]:
|
async def get_model_versions(self, model_id: str) -> Optional[Dict]:
|
||||||
"""Not supported by CivArchive provider"""
|
"""Not supported by CivArchive provider"""
|
||||||
@@ -87,7 +108,8 @@ class CivArchiveModelMetadataProvider(ModelMetadataProvider):
|
|||||||
html_content = await response.text()
|
html_content = await response.text()
|
||||||
|
|
||||||
# Parse HTML to extract JSON data
|
# Parse HTML to extract JSON data
|
||||||
soup = BeautifulSoup(html_content, 'html.parser')
|
soup_parser = _require_beautifulsoup()
|
||||||
|
soup = soup_parser(html_content, 'html.parser')
|
||||||
script_tag = soup.find('script', {'id': '__NEXT_DATA__', 'type': 'application/json'})
|
script_tag = soup.find('script', {'id': '__NEXT_DATA__', 'type': 'application/json'})
|
||||||
|
|
||||||
if not script_tag:
|
if not script_tag:
|
||||||
@@ -174,20 +196,17 @@ class CivArchiveModelMetadataProvider(ModelMetadataProvider):
|
|||||||
async def get_model_version_info(self, version_id: str) -> Tuple[Optional[Dict], Optional[str]]:
|
async def get_model_version_info(self, version_id: str) -> Tuple[Optional[Dict], Optional[str]]:
|
||||||
"""Not supported by CivArchive provider - requires both model_id and version_id"""
|
"""Not supported by CivArchive provider - requires both model_id and version_id"""
|
||||||
return None, "CivArchive provider requires both model_id and version_id"
|
return None, "CivArchive provider requires both model_id and version_id"
|
||||||
|
|
||||||
async def get_model_metadata(self, model_id: str) -> Tuple[Optional[Dict], int]:
|
|
||||||
"""Not supported by CivArchive provider"""
|
|
||||||
return None, 404
|
|
||||||
|
|
||||||
class SQLiteModelMetadataProvider(ModelMetadataProvider):
|
class SQLiteModelMetadataProvider(ModelMetadataProvider):
|
||||||
"""Provider that uses SQLite database for metadata"""
|
"""Provider that uses SQLite database for metadata"""
|
||||||
|
|
||||||
def __init__(self, db_path: str):
|
def __init__(self, db_path: str):
|
||||||
self.db_path = db_path
|
self.db_path = db_path
|
||||||
|
self._aiosqlite = _require_aiosqlite()
|
||||||
|
|
||||||
async def get_model_by_hash(self, model_hash: str) -> Optional[Dict]:
|
async def get_model_by_hash(self, model_hash: str) -> Tuple[Optional[Dict], Optional[str]]:
|
||||||
"""Find model by hash value from SQLite database"""
|
"""Find model by hash value from SQLite database"""
|
||||||
async with aiosqlite.connect(self.db_path) as db:
|
async with self._aiosqlite.connect(self.db_path) as db:
|
||||||
# Look up in model_files table to get model_id and version_id
|
# Look up in model_files table to get model_id and version_id
|
||||||
query = """
|
query = """
|
||||||
SELECT model_id, version_id
|
SELECT model_id, version_id
|
||||||
@@ -195,24 +214,25 @@ class SQLiteModelMetadataProvider(ModelMetadataProvider):
|
|||||||
WHERE sha256 = ?
|
WHERE sha256 = ?
|
||||||
LIMIT 1
|
LIMIT 1
|
||||||
"""
|
"""
|
||||||
db.row_factory = aiosqlite.Row
|
db.row_factory = self._aiosqlite.Row
|
||||||
cursor = await db.execute(query, (model_hash.upper(),))
|
cursor = await db.execute(query, (model_hash.upper(),))
|
||||||
file_row = await cursor.fetchone()
|
file_row = await cursor.fetchone()
|
||||||
|
|
||||||
if not file_row:
|
if not file_row:
|
||||||
return None
|
return None, "Model not found"
|
||||||
|
|
||||||
# Get version details
|
# Get version details
|
||||||
model_id = file_row['model_id']
|
model_id = file_row['model_id']
|
||||||
version_id = file_row['version_id']
|
version_id = file_row['version_id']
|
||||||
|
|
||||||
# Build response in the same format as Civitai API
|
# Build response in the same format as Civitai API
|
||||||
return await self._get_version_with_model_data(db, model_id, version_id)
|
result = await self._get_version_with_model_data(db, model_id, version_id)
|
||||||
|
return result, None if result else "Error retrieving model data"
|
||||||
|
|
||||||
async def get_model_versions(self, model_id: str) -> Optional[Dict]:
|
async def get_model_versions(self, model_id: str) -> Optional[Dict]:
|
||||||
"""Get all versions of a model from SQLite database"""
|
"""Get all versions of a model from SQLite database"""
|
||||||
async with aiosqlite.connect(self.db_path) as db:
|
async with self._aiosqlite.connect(self.db_path) as db:
|
||||||
db.row_factory = aiosqlite.Row
|
db.row_factory = self._aiosqlite.Row
|
||||||
|
|
||||||
# First check if model exists
|
# First check if model exists
|
||||||
model_query = "SELECT * FROM models WHERE id = ?"
|
model_query = "SELECT * FROM models WHERE id = ?"
|
||||||
@@ -224,6 +244,7 @@ class SQLiteModelMetadataProvider(ModelMetadataProvider):
|
|||||||
|
|
||||||
model_data = json.loads(model_row['data'])
|
model_data = json.loads(model_row['data'])
|
||||||
model_type = model_row['type']
|
model_type = model_row['type']
|
||||||
|
model_name = model_row['name']
|
||||||
|
|
||||||
# Get all versions for this model
|
# Get all versions for this model
|
||||||
versions_query = """
|
versions_query = """
|
||||||
@@ -260,7 +281,8 @@ class SQLiteModelMetadataProvider(ModelMetadataProvider):
|
|||||||
|
|
||||||
return {
|
return {
|
||||||
'modelVersions': model_versions,
|
'modelVersions': model_versions,
|
||||||
'type': model_type
|
'type': model_type,
|
||||||
|
'name': model_name
|
||||||
}
|
}
|
||||||
|
|
||||||
async def get_model_version(self, model_id: int = None, version_id: int = None) -> Optional[Dict]:
|
async def get_model_version(self, model_id: int = None, version_id: int = None) -> Optional[Dict]:
|
||||||
@@ -268,8 +290,8 @@ class SQLiteModelMetadataProvider(ModelMetadataProvider):
|
|||||||
if not model_id and not version_id:
|
if not model_id and not version_id:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
async with aiosqlite.connect(self.db_path) as db:
|
async with self._aiosqlite.connect(self.db_path) as db:
|
||||||
db.row_factory = aiosqlite.Row
|
db.row_factory = self._aiosqlite.Row
|
||||||
|
|
||||||
# Case 1: Only version_id is provided
|
# Case 1: Only version_id is provided
|
||||||
if model_id is None and version_id is not None:
|
if model_id is None and version_id is not None:
|
||||||
@@ -305,8 +327,8 @@ class SQLiteModelMetadataProvider(ModelMetadataProvider):
|
|||||||
|
|
||||||
async def get_model_version_info(self, version_id: str) -> Tuple[Optional[Dict], Optional[str]]:
|
async def get_model_version_info(self, version_id: str) -> Tuple[Optional[Dict], Optional[str]]:
|
||||||
"""Fetch model version metadata from SQLite database"""
|
"""Fetch model version metadata from SQLite database"""
|
||||||
async with aiosqlite.connect(self.db_path) as db:
|
async with self._aiosqlite.connect(self.db_path) as db:
|
||||||
db.row_factory = aiosqlite.Row
|
db.row_factory = self._aiosqlite.Row
|
||||||
|
|
||||||
# Get version details
|
# Get version details
|
||||||
version_query = "SELECT model_id FROM model_versions WHERE id = ?"
|
version_query = "SELECT model_id FROM model_versions WHERE id = ?"
|
||||||
@@ -322,37 +344,6 @@ class SQLiteModelMetadataProvider(ModelMetadataProvider):
|
|||||||
version_data = await self._get_version_with_model_data(db, model_id, version_id)
|
version_data = await self._get_version_with_model_data(db, model_id, version_id)
|
||||||
return version_data, None
|
return version_data, None
|
||||||
|
|
||||||
async def get_model_metadata(self, model_id: str) -> Tuple[Optional[Dict], int]:
|
|
||||||
"""Fetch model metadata from SQLite database"""
|
|
||||||
async with aiosqlite.connect(self.db_path) as db:
|
|
||||||
db.row_factory = aiosqlite.Row
|
|
||||||
|
|
||||||
# Get model details
|
|
||||||
model_query = "SELECT name, type, data, username FROM models WHERE id = ?"
|
|
||||||
cursor = await db.execute(model_query, (model_id,))
|
|
||||||
model_row = await cursor.fetchone()
|
|
||||||
|
|
||||||
if not model_row:
|
|
||||||
return None, 404
|
|
||||||
|
|
||||||
# Parse data JSON
|
|
||||||
try:
|
|
||||||
model_data = json.loads(model_row['data'])
|
|
||||||
|
|
||||||
# Extract relevant metadata
|
|
||||||
metadata = {
|
|
||||||
"description": model_data.get("description", "No model description available"),
|
|
||||||
"tags": model_data.get("tags", []),
|
|
||||||
"creator": {
|
|
||||||
"username": model_row['username'] or model_data.get("creator", {}).get("username"),
|
|
||||||
"image": model_data.get("creator", {}).get("image")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return metadata, 200
|
|
||||||
except json.JSONDecodeError:
|
|
||||||
return None, 500
|
|
||||||
|
|
||||||
async def _get_version_with_model_data(self, db, model_id, version_id) -> Optional[Dict]:
|
async def _get_version_with_model_data(self, db, model_id, version_id) -> Optional[Dict]:
|
||||||
"""Helper to build version data with model information"""
|
"""Helper to build version data with model information"""
|
||||||
# Get version details
|
# Get version details
|
||||||
@@ -398,6 +389,45 @@ class SQLiteModelMetadataProvider(ModelMetadataProvider):
|
|||||||
# Add any additional fields from version data
|
# Add any additional fields from version data
|
||||||
result.update(version_data)
|
result.update(version_data)
|
||||||
|
|
||||||
|
# Attach files associated with this version from model_files table
|
||||||
|
files_query = """
|
||||||
|
SELECT data
|
||||||
|
FROM model_files
|
||||||
|
WHERE version_id = ? AND type = 'Model'
|
||||||
|
ORDER BY id ASC
|
||||||
|
"""
|
||||||
|
cursor = await db.execute(files_query, (version_id,))
|
||||||
|
file_rows = await cursor.fetchall()
|
||||||
|
|
||||||
|
files = []
|
||||||
|
for file_row in file_rows:
|
||||||
|
try:
|
||||||
|
file_data = json.loads(file_row['data'])
|
||||||
|
except json.JSONDecodeError:
|
||||||
|
logger.warning(
|
||||||
|
"Skipping model_files entry with invalid JSON for version_id %s", version_id
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
# Remove 'modelId' and 'modelVersionId' fields if present
|
||||||
|
file_data.pop('modelId', None)
|
||||||
|
file_data.pop('modelVersionId', None)
|
||||||
|
files.append(file_data)
|
||||||
|
|
||||||
|
if 'files' in result:
|
||||||
|
existing_files = result['files']
|
||||||
|
if isinstance(existing_files, list):
|
||||||
|
existing_files.extend(files)
|
||||||
|
result['files'] = existing_files
|
||||||
|
else:
|
||||||
|
merged_files = files.copy()
|
||||||
|
if existing_files:
|
||||||
|
merged_files.insert(0, existing_files)
|
||||||
|
result['files'] = merged_files
|
||||||
|
elif files:
|
||||||
|
result['files'] = files
|
||||||
|
else:
|
||||||
|
result['files'] = []
|
||||||
|
|
||||||
return result
|
return result
|
||||||
except json.JSONDecodeError:
|
except json.JSONDecodeError:
|
||||||
return None
|
return None
|
||||||
@@ -407,15 +437,16 @@ class FallbackMetadataProvider(ModelMetadataProvider):
|
|||||||
def __init__(self, providers: list):
|
def __init__(self, providers: list):
|
||||||
self.providers = providers
|
self.providers = providers
|
||||||
|
|
||||||
async def get_model_by_hash(self, model_hash: str) -> Optional[Dict]:
|
async def get_model_by_hash(self, model_hash: str) -> Tuple[Optional[Dict], Optional[str]]:
|
||||||
for provider in self.providers:
|
for provider in self.providers:
|
||||||
try:
|
try:
|
||||||
result = await provider.get_model_by_hash(model_hash)
|
result, error = await provider.get_model_by_hash(model_hash)
|
||||||
if result:
|
if result:
|
||||||
return result
|
return result, error
|
||||||
except Exception:
|
except Exception as e:
|
||||||
|
logger.debug(f"Provider failed for get_model_by_hash: {e}")
|
||||||
continue
|
continue
|
||||||
return None
|
return None, "Model not found"
|
||||||
|
|
||||||
async def get_model_versions(self, model_id: str) -> Optional[Dict]:
|
async def get_model_versions(self, model_id: str) -> Optional[Dict]:
|
||||||
for provider in self.providers:
|
for provider in self.providers:
|
||||||
@@ -450,17 +481,6 @@ class FallbackMetadataProvider(ModelMetadataProvider):
|
|||||||
continue
|
continue
|
||||||
return None, "No provider could retrieve the data"
|
return None, "No provider could retrieve the data"
|
||||||
|
|
||||||
async def get_model_metadata(self, model_id: str) -> Tuple[Optional[Dict], int]:
|
|
||||||
for provider in self.providers:
|
|
||||||
try:
|
|
||||||
result, status = await provider.get_model_metadata(model_id)
|
|
||||||
if result:
|
|
||||||
return result, status
|
|
||||||
except Exception as e:
|
|
||||||
logger.debug(f"Provider failed for get_model_metadata: {e}")
|
|
||||||
continue
|
|
||||||
return None, 404
|
|
||||||
|
|
||||||
class ModelMetadataProviderManager:
|
class ModelMetadataProviderManager:
|
||||||
"""Manager for selecting and using model metadata providers"""
|
"""Manager for selecting and using model metadata providers"""
|
||||||
|
|
||||||
@@ -483,7 +503,7 @@ class ModelMetadataProviderManager:
         if is_default or self.default_provider is None:
             self.default_provider = name

-    async def get_model_by_hash(self, model_hash: str, provider_name: str = None) -> Optional[Dict]:
+    async def get_model_by_hash(self, model_hash: str, provider_name: str = None) -> Tuple[Optional[Dict], Optional[str]]:
         """Find model by hash using specified or default provider"""
         provider = self._get_provider(provider_name)
         return await provider.get_model_by_hash(model_hash)
@@ -503,11 +523,6 @@ class ModelMetadataProviderManager:
         provider = self._get_provider(provider_name)
         return await provider.get_model_version_info(version_id)

-    async def get_model_metadata(self, model_id: str, provider_name: str = None) -> Tuple[Optional[Dict], int]:
-        """Fetch model metadata using specified or default provider"""
-        provider = self._get_provider(provider_name)
-        return await provider.get_model_metadata(model_id)
-
     def _get_provider(self, provider_name: str = None) -> ModelMetadataProvider:
         """Get provider by name or default provider"""
         if provider_name and provider_name in self.providers:

py/services/model_query.py (new file, 196 lines)
@@ -0,0 +1,196 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, Protocol, Callable
+
+from ..utils.constants import NSFW_LEVELS
+from ..utils.utils import fuzzy_match as default_fuzzy_match
+
+
+class SettingsProvider(Protocol):
+    """Protocol describing the SettingsManager contract used by query helpers."""
+
+    def get(self, key: str, default: Any = None) -> Any:
+        ...
+
+
+@dataclass(frozen=True)
+class SortParams:
+    """Normalized representation of sorting instructions."""
+
+    key: str
+    order: str
+
+
+@dataclass(frozen=True)
+class FilterCriteria:
+    """Container for model list filtering options."""
+
+    folder: Optional[str] = None
+    base_models: Optional[Sequence[str]] = None
+    tags: Optional[Sequence[str]] = None
+    favorites_only: bool = False
+    search_options: Optional[Dict[str, Any]] = None
+
+
+class ModelCacheRepository:
+    """Adapter around scanner cache access and sort normalisation."""
+
+    def __init__(self, scanner) -> None:
+        self._scanner = scanner
+
+    async def get_cache(self):
+        """Return the underlying cache instance from the scanner."""
+        return await self._scanner.get_cached_data()
+
+    async def fetch_sorted(self, params: SortParams) -> List[Dict[str, Any]]:
+        """Fetch cached data pre-sorted according to ``params``."""
+        cache = await self.get_cache()
+        return await cache.get_sorted_data(params.key, params.order)
+
+    @staticmethod
+    def parse_sort(sort_by: str) -> SortParams:
+        """Parse an incoming sort string into key/order primitives."""
+        if not sort_by:
+            return SortParams(key="name", order="asc")
+
+        if ":" in sort_by:
+            raw_key, raw_order = sort_by.split(":", 1)
+            sort_key = raw_key.strip().lower() or "name"
+            order = raw_order.strip().lower()
+        else:
+            sort_key = sort_by.strip().lower() or "name"
+            order = "asc"
+
+        if order not in ("asc", "desc"):
+            order = "asc"
+
+        return SortParams(key=sort_key, order=order)
+
+
+class ModelFilterSet:
+    """Applies common filtering rules to the model collection."""
+
+    def __init__(self, settings: SettingsProvider, nsfw_levels: Optional[Dict[str, int]] = None) -> None:
+        self._settings = settings
+        self._nsfw_levels = nsfw_levels or NSFW_LEVELS
+
+    def apply(self, data: Iterable[Dict[str, Any]], criteria: FilterCriteria) -> List[Dict[str, Any]]:
+        """Return items that satisfy the provided criteria."""
+        items = list(data)
+
+        if self._settings.get("show_only_sfw", False):
+            threshold = self._nsfw_levels.get("R", 0)
+            items = [
+                item for item in items
+                if not item.get("preview_nsfw_level") or item.get("preview_nsfw_level") < threshold
+            ]
+
+        if criteria.favorites_only:
+            items = [item for item in items if item.get("favorite", False)]
+
+        folder = criteria.folder
+        options = criteria.search_options or {}
+        recursive = bool(options.get("recursive", True))
+        if folder is not None:
+            if recursive:
+                if folder:
+                    folder_with_sep = f"{folder}/"
+                    items = [
+                        item for item in items
+                        if item.get("folder") == folder or item.get("folder", "").startswith(folder_with_sep)
+                    ]
+            else:
+                items = [item for item in items if item.get("folder") == folder]
+
+        base_models = criteria.base_models or []
+        if base_models:
+            base_model_set = set(base_models)
+            items = [item for item in items if item.get("base_model") in base_model_set]
+
+        tags = criteria.tags or []
+        if tags:
+            tag_set = set(tags)
+            items = [
+                item for item in items
+                if any(tag in tag_set for tag in item.get("tags", []))
+            ]
+
+        return items
+
+
+class SearchStrategy:
+    """Encapsulates text and fuzzy matching behaviour for model queries."""
+
+    DEFAULT_OPTIONS: Dict[str, Any] = {
+        "filename": True,
+        "modelname": True,
+        "tags": False,
+        "recursive": True,
+        "creator": False,
+    }
+
+    def __init__(self, fuzzy_matcher: Optional[Callable[[str, str], bool]] = None) -> None:
+        self._fuzzy_match = fuzzy_matcher or default_fuzzy_match
+
+    def normalize_options(self, options: Optional[Dict[str, Any]]) -> Dict[str, Any]:
+        """Merge provided options with defaults without mutating input."""
+        normalized = dict(self.DEFAULT_OPTIONS)
+        if options:
+            normalized.update(options)
+        return normalized
+
+    def apply(
+        self,
+        data: Iterable[Dict[str, Any]],
+        search_term: str,
+        options: Dict[str, Any],
+        fuzzy: bool = False,
+    ) -> List[Dict[str, Any]]:
+        """Return items matching the search term using the configured strategy."""
+        if not search_term:
+            return list(data)
+
+        search_lower = search_term.lower()
+        results: List[Dict[str, Any]] = []
+
+        for item in data:
+            if options.get("filename", True):
+                candidate = item.get("file_name", "")
+                if self._matches(candidate, search_term, search_lower, fuzzy):
+                    results.append(item)
+                    continue
+
+            if options.get("modelname", True):
+                candidate = item.get("model_name", "")
+                if self._matches(candidate, search_term, search_lower, fuzzy):
+                    results.append(item)
+                    continue
+
+            if options.get("tags", False):
+                tags = item.get("tags", []) or []
+                if any(self._matches(tag, search_term, search_lower, fuzzy) for tag in tags):
+                    results.append(item)
+                    continue
+
+            if options.get("creator", False):
+                creator_username = ""
+                civitai = item.get("civitai")
+                if isinstance(civitai, dict):
+                    creator = civitai.get("creator")
+                    if isinstance(creator, dict):
+                        creator_username = creator.get("username", "")
+                if creator_username and self._matches(creator_username, search_term, search_lower, fuzzy):
+                    results.append(item)
+                    continue
+
+        return results
+
+    def _matches(self, candidate: str, search_term: str, search_lower: str, fuzzy: bool) -> bool:
+        if not candidate:
+            return False
+
+        candidate_lower = candidate.lower()
+        if fuzzy:
+            return self._fuzzy_match(candidate, search_term)
+        return search_lower in candidate_lower
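Editor's note: taken together, the new module splits sorting, filtering, and text search into composable pieces. A minimal sketch of how they chain — the settings stub, the nsfw level value, and the sample items are invented for illustration:

class _Settings:
    # Illustrative stand-in for SettingsManager; only .get() is required.
    def get(self, key, default=None):
        return {"show_only_sfw": False}.get(key, default)

params = ModelCacheRepository.parse_sort("size:desc")  # SortParams(key='size', order='desc')
criteria = FilterCriteria(folder="characters", tags=["anime"])
items = [
    {"file_name": "mika_v2", "model_name": "Mika", "folder": "characters", "tags": ["anime"]},
    {"file_name": "city_bg", "model_name": "City", "folder": "scenery", "tags": ["background"]},
]
# nsfw_levels value is an assumption; the real mapping lives in NSFW_LEVELS.
filtered = ModelFilterSet(_Settings(), nsfw_levels={"R": 17}).apply(items, criteria)
strategy = SearchStrategy()
results = strategy.apply(filtered, "mika", strategy.normalize_options(None))
# results contains only the "mika_v2" item: it survives the folder and tag
# filters and matches the substring search on file_name.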
@@ -4,7 +4,8 @@ import logging
 import asyncio
 import time
 import shutil
-from typing import List, Dict, Optional, Type, Set
+from dataclasses import dataclass
+from typing import Any, Awaitable, Callable, Dict, List, Mapping, Optional, Set, Type, Union

 from ..utils.models import BaseModelMetadata
 from ..config import config
@@ -13,11 +14,23 @@ from ..utils.metadata_manager import MetadataManager
 from .model_cache import ModelCache
 from .model_hash_index import ModelHashIndex
 from ..utils.constants import PREVIEW_EXTENSIONS
+from .model_lifecycle_service import delete_model_artifacts
 from .service_registry import ServiceRegistry
 from .websocket_manager import ws_manager
+from .persistent_model_cache import get_persistent_cache

 logger = logging.getLogger(__name__)


+@dataclass
+class CacheBuildResult:
+    """Represents the outcome of scanning model files for cache building."""
+
+    raw_data: List[Dict]
+    hash_index: ModelHashIndex
+    tags_count: Dict[str, int]
+    excluded_models: List[str]
+
+
 class ModelScanner:
     """Base service for scanning and managing model files"""

@@ -67,16 +80,123 @@ class ModelScanner:
         self._tags_count = {}  # Dictionary to store tag counts
         self._is_initializing = False  # Flag to track initialization state
         self._excluded_models = []  # List to track excluded models
+        self._persistent_cache = get_persistent_cache()
         self._initialized = True

         # Register this service
         asyncio.create_task(self._register_service())

+    def on_library_changed(self) -> None:
+        """Reset caches when the active library changes."""
+        self._persistent_cache = get_persistent_cache()
+        self._cache = None
+        self._hash_index = ModelHashIndex()
+        self._tags_count = {}
+        self._excluded_models = []
+        self._is_initializing = False
+
+        try:
+            loop = asyncio.get_running_loop()
+        except RuntimeError:
+            loop = None
+
+        if loop and not loop.is_closed():
+            loop.create_task(self.initialize_in_background())
+
     async def _register_service(self):
         """Register this instance with the ServiceRegistry"""
         service_name = f"{self.model_type}_scanner"
         await ServiceRegistry.register_service(service_name, self)

+    def _slim_civitai_payload(self, civitai: Optional[Mapping[str, Any]]) -> Optional[Dict[str, Any]]:
+        """Return a lightweight civitai payload containing only frequently used keys."""
+        if not isinstance(civitai, Mapping) or not civitai:
+            return None
+
+        slim: Dict[str, Any] = {}
+        for key in ('id', 'modelId', 'name'):
+            value = civitai.get(key)
+            if value not in (None, '', []):
+                slim[key] = value
+
+        trained_words = civitai.get('trainedWords')
+        if trained_words:
+            slim['trainedWords'] = list(trained_words) if isinstance(trained_words, list) else trained_words
+
+        return slim or None
+
+    def _build_cache_entry(
+        self,
+        source: Union[BaseModelMetadata, Mapping[str, Any]],
+        *,
+        folder: Optional[str] = None,
+        file_path_override: Optional[str] = None
+    ) -> Dict[str, Any]:
+        """Project metadata into the lightweight cache representation."""
+        is_mapping = isinstance(source, Mapping)
+
+        def get_value(key: str, default: Any = None) -> Any:
+            if is_mapping:
+                return source.get(key, default)
+            return getattr(source, key, default)
+
+        file_path = file_path_override or get_value('file_path', '') or ''
+        normalized_path = file_path.replace('\\', '/')
+
+        folder_value = folder if folder is not None else get_value('folder', '') or ''
+        normalized_folder = folder_value.replace('\\', '/')
+
+        tags_value = get_value('tags') or []
+        if isinstance(tags_value, list):
+            tags_list = list(tags_value)
+        elif isinstance(tags_value, (set, tuple)):
+            tags_list = list(tags_value)
+        else:
+            tags_list = []
+
+        preview_url = get_value('preview_url', '') or ''
+        if isinstance(preview_url, str):
+            preview_url = preview_url.replace('\\', '/')
+        else:
+            preview_url = ''
+
+        civitai_slim = self._slim_civitai_payload(get_value('civitai'))
+        usage_tips = get_value('usage_tips', '') or ''
+        if not isinstance(usage_tips, str):
+            usage_tips = str(usage_tips)
+        notes = get_value('notes', '') or ''
+        if not isinstance(notes, str):
+            notes = str(notes)
+
+        entry: Dict[str, Any] = {
+            'file_path': normalized_path,
+            'file_name': get_value('file_name', '') or '',
+            'model_name': get_value('model_name', '') or '',
+            'folder': normalized_folder,
+            'size': int(get_value('size', 0) or 0),
+            'modified': float(get_value('modified', 0.0) or 0.0),
+            'sha256': (get_value('sha256', '') or '').lower(),
+            'base_model': get_value('base_model', '') or '',
+            'preview_url': preview_url,
+            'preview_nsfw_level': int(get_value('preview_nsfw_level', 0) or 0),
+            'from_civitai': bool(get_value('from_civitai', True)),
+            'favorite': bool(get_value('favorite', False)),
+            'notes': notes,
+            'usage_tips': usage_tips,
+            'exclude': bool(get_value('exclude', False)),
+            'db_checked': bool(get_value('db_checked', False)),
+            'last_checked_at': float(get_value('last_checked_at', 0.0) or 0.0),
+            'tags': tags_list,
+            'civitai': civitai_slim,
+            'civitai_deleted': bool(get_value('civitai_deleted', False)),
+        }
+
+        model_type = get_value('model_type', None)
+        if model_type:
+            entry['model_type'] = model_type
+
+        return entry
+
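Editor's note: the projection above normalizes path separators, lowercases hashes, and slims the civitai payload down to the keys the UI actually reads. An illustrative before/after, assuming a scanner instance; the sample values are invented and `_build_cache_entry` is an internal method, so this is a sketch rather than a supported API:

raw = {
    'file_path': 'D:\\models\\loras\\mika_v2.safetensors',
    'sha256': 'ABC123',
    'civitai': {'id': 11, 'modelId': 22, 'name': 'Mika',
                'trainedWords': ['mika'], 'description': 'dropped'},
}
entry = scanner._build_cache_entry(raw)
# entry['file_path'] == 'D:/models/loras/mika_v2.safetensors'  (separators normalized)
# entry['sha256']    == 'abc123'                               (lowercased)
# entry['civitai']   == {'id': 11, 'modelId': 22, 'name': 'Mika', 'trainedWords': ['mika']}
#                       (only the frequently used keys survive the slimming step)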
     async def initialize_in_background(self) -> None:
         """Initialize cache in background using thread pool"""
         try:
@@ -91,7 +211,12 @@ class ModelScanner:
             self._is_initializing = True

             # Determine the page type based on model type
-            page_type = 'loras' if self.model_type == 'lora' else 'checkpoints'
+            page_type_map = {
+                'lora': 'loras',
+                'checkpoint': 'checkpoints',
+                'embedding': 'embeddings'
+            }
+            page_type = page_type_map.get(self.model_type, self.model_type)

             # First, try to load from cache
             await ws_manager.broadcast_init_progress({
@@ -101,8 +226,25 @@ class ModelScanner:
                 'scanner_type': self.model_type,
                 'pageType': page_type
             })

-            # If cache loading failed, proceed with full scan
+            cache_loaded = await self._load_persisted_cache(page_type)
+
+            if cache_loaded:
+                await asyncio.sleep(0)  # Yield control so the UI can process the cache hydration update
+                await ws_manager.broadcast_init_progress({
+                    'stage': 'finalizing',
+                    'progress': 100,
+                    'status': 'complete',
+                    'details': f"Loaded {len(self._cache.raw_data)} cached {self.model_type} files from disk.",
+                    'scanner_type': self.model_type,
+                    'pageType': page_type
+                })
+                logger.info(
+                    f"{self.model_type.capitalize()} cache hydrated from persisted snapshot with {len(self._cache.raw_data)} models"
+                )
+                return
+
+            # Persistent load failed; fall back to a full scan
             await ws_manager.broadcast_init_progress({
                 'stage': 'scan_folders',
                 'progress': 0,
@@ -129,13 +271,17 @@ class ModelScanner:
             start_time = time.time()

             # Use thread pool to execute CPU-intensive operations with progress reporting
-            await loop.run_in_executor(
+            scan_result: Optional[CacheBuildResult] = await loop.run_in_executor(
                 None,  # Use default thread pool
                 self._initialize_cache_sync,  # Run synchronous version in thread
                 total_files,  # Pass the total file count for progress reporting
                 page_type  # Pass the page type for progress reporting
             )

+            if scan_result:
+                await self._apply_scan_result(scan_result)
+                await self._save_persistent_cache(scan_result)
+
             # Send final progress update
             await ws_manager.broadcast_init_progress({
                 'stage': 'finalizing',
@@ -164,6 +310,105 @@ class ModelScanner:
             # Always clear the initializing flag when done
             self._is_initializing = False

+    async def _load_persisted_cache(self, page_type: str) -> bool:
+        """Attempt to hydrate the in-memory cache from the SQLite snapshot."""
+        if not getattr(self, '_persistent_cache', None):
+            return False
+
+        loop = asyncio.get_event_loop()
+        try:
+            persisted = await loop.run_in_executor(
+                None,
+                self._persistent_cache.load_cache,
+                self.model_type
+            )
+        except FileNotFoundError:
+            return False
+        except Exception as exc:
+            logger.debug("%s Scanner: Could not load persisted cache: %s", self.model_type.capitalize(), exc)
+            return False
+
+        if not persisted or not persisted.raw_data:
+            return False
+
+        hash_index = ModelHashIndex()
+        for sha_value, path in persisted.hash_rows:
+            if sha_value and path:
+                hash_index.add_entry(sha_value.lower(), path)
+
+        tags_count: Dict[str, int] = {}
+        for item in persisted.raw_data:
+            for tag in item.get('tags') or []:
+                tags_count[tag] = tags_count.get(tag, 0) + 1
+
+        scan_result = CacheBuildResult(
+            raw_data=list(persisted.raw_data),
+            hash_index=hash_index,
+            tags_count=tags_count,
+            excluded_models=list(persisted.excluded_models)
+        )
+
+        await self._apply_scan_result(scan_result)
+
+        await ws_manager.broadcast_init_progress({
+            'stage': 'loading_cache',
+            'progress': 1,
+            'details': f"Loaded cached {self.model_type} data from disk",
+            'scanner_type': self.model_type,
+            'pageType': page_type
+        })
+        return True
+
+    async def _save_persistent_cache(self, scan_result: CacheBuildResult) -> None:
+        if not scan_result or not getattr(self, '_persistent_cache', None):
+            return
+
+        hash_snapshot = self._build_hash_index_snapshot(scan_result.hash_index)
+        loop = asyncio.get_event_loop()
+        try:
+            await loop.run_in_executor(
+                None,
+                self._persistent_cache.save_cache,
+                self.model_type,
+                list(scan_result.raw_data),
+                hash_snapshot,
+                list(scan_result.excluded_models)
+            )
+        except Exception as exc:
+            logger.warning("%s Scanner: Failed to persist cache: %s", self.model_type.capitalize(), exc)
+
+    def _build_hash_index_snapshot(self, hash_index: Optional[ModelHashIndex]) -> Dict[str, List[str]]:
+        snapshot: Dict[str, List[str]] = {}
+        if not hash_index:
+            return snapshot
+
+        for sha_value, path in getattr(hash_index, '_hash_to_path', {}).items():
+            if not sha_value or not path:
+                continue
+            bucket = snapshot.setdefault(sha_value.lower(), [])
+            if path not in bucket:
+                bucket.append(path)
+
+        for sha_value, paths in getattr(hash_index, '_duplicate_hashes', {}).items():
+            if not sha_value:
+                continue
+            bucket = snapshot.setdefault(sha_value.lower(), [])
+            for path in paths:
+                if path and path not in bucket:
+                    bucket.append(path)
+        return snapshot
+
+    async def _persist_current_cache(self) -> None:
+        if self._cache is None or not getattr(self, '_persistent_cache', None):
+            return
+
+        snapshot = CacheBuildResult(
+            raw_data=list(self._cache.raw_data),
+            hash_index=self._hash_index,
+            tags_count=dict(self._tags_count),
+            excluded_models=list(self._excluded_models)
+        )
+        await self._save_persistent_cache(snapshot)
+
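Editor's note: `_build_hash_index_snapshot` flattens both the primary and duplicate hash maps into a single `{sha256: [paths]}` dict, which is the shape `save_cache` consumes. A small illustration of that shape — the hash values and paths are made up:

snapshot = {
    'abc123...': ['loras/mika_v2.safetensors'],
    'def456...': ['loras/a.safetensors', 'backup/a.safetensors'],  # duplicate hash, two paths
}
# save_cache later writes one (model_type, sha256, file_path) row per path,
# using INSERT OR IGNORE so repeated snapshots stay idempotent.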
     def _count_model_files(self) -> int:
         """Count all model files with supported extensions in all roots

@@ -203,124 +448,53 @@ class ModelScanner:

         return total_files

-    def _initialize_cache_sync(self, total_files=0, page_type='loras'):
+    def _initialize_cache_sync(self, total_files: int = 0, page_type: str = 'loras') -> Optional[CacheBuildResult]:
         """Synchronous version of cache initialization for thread pool execution"""
+        loop = asyncio.new_event_loop()
         try:
-            # Create a new event loop for this thread
-            loop = asyncio.new_event_loop()
             asyncio.set_event_loop(loop)

-            # Create a synchronous method to bypass the async lock
-            def sync_initialize_cache():
-                # Track progress
-                processed_files = 0
-                last_progress_time = time.time()
-                last_progress_percent = 0
-
-                # We need a wrapper around scan_all_models to track progress
-                # This is a local function that will run in our thread's event loop
-                async def scan_with_progress():
-                    nonlocal processed_files, last_progress_time, last_progress_percent
-
-                    # For storing raw model data
-                    all_models = []
-
-                    # Process each model root
-                    for root_path in self.get_model_roots():
-                        if not os.path.exists(root_path):
-                            continue
-
-                        # Track visited paths to avoid symlink loops
-                        visited_paths = set()
-
-                        # Recursively process directory
-                        async def scan_dir_with_progress(path):
-                            nonlocal processed_files, last_progress_time, last_progress_percent
-
-                            try:
-                                real_path = os.path.realpath(path)
-                                if real_path in visited_paths:
-                                    return
-                                visited_paths.add(real_path)
-
-                                with os.scandir(path) as it:
-                                    entries = list(it)
-                                    for entry in entries:
-                                        try:
-                                            if entry.is_file(follow_symlinks=True):
-                                                ext = os.path.splitext(entry.name)[1].lower()
-                                                if ext in self.file_extensions:
-                                                    file_path = entry.path.replace(os.sep, "/")
-                                                    result = await self._process_model_file(file_path, root_path)
-                                                    if result:
-                                                        all_models.append(result)
-
-                                                # Update progress counter
-                                                processed_files += 1
-
-                                                # Update progress periodically (not every file to avoid excessive updates)
-                                                current_time = time.time()
-                                                if total_files > 0 and (current_time - last_progress_time > 0.5 or processed_files == total_files):
-                                                    # Adjusted progress calculation
-                                                    progress_percent = min(99, int(1 + (processed_files / total_files) * 98))
-                                                    if progress_percent > last_progress_percent:
-                                                        last_progress_percent = progress_percent
-                                                        last_progress_time = current_time
-
-                                                        # Send progress update through websocket
-                                                        await ws_manager.broadcast_init_progress({
-                                                            'stage': 'process_models',
-                                                            'progress': progress_percent,
-                                                            'details': f"Processing {self.model_type} files: {processed_files}/{total_files}",
-                                                            'scanner_type': self.model_type,
-                                                            'pageType': page_type
-                                                        })
-
-                                            elif entry.is_dir(follow_symlinks=True):
-                                                await scan_dir_with_progress(entry.path)
-
-                                        except Exception as e:
-                                            logger.error(f"Error processing entry {entry.path}: {e}")
-                            except Exception as e:
-                                logger.error(f"Error scanning {path}: {e}")
-
-                        # Process the root path
-                        await scan_dir_with_progress(root_path)
-
-                    return all_models
-
-                # Run the progress-tracking scan function
-                raw_data = loop.run_until_complete(scan_with_progress())
-
-                # Update hash index and tags count
-                for model_data in raw_data:
-                    if 'sha256' in model_data and 'file_path' in model_data:
-                        self._hash_index.add_entry(model_data['sha256'].lower(), model_data['file_path'])
-
-                    # Count tags
-                    if 'tags' in model_data and model_data['tags']:
-                        for tag in model_data['tags']:
-                            self._tags_count[tag] = self._tags_count.get(tag, 0) + 1
-
-                # Log duplicate filename warnings after building the index
-                # duplicate_filenames = self._hash_index.get_duplicate_filenames()
-                # if duplicate_filenames:
-                #     logger.warning(f"Found {len(duplicate_filenames)} filename(s) with duplicates during {self.model_type} cache build:")
-                #     for filename, paths in duplicate_filenames.items():
-                #         logger.warning(f"  Duplicate filename '{filename}': {paths}")
-
-                # Update cache
-                self._cache.raw_data = raw_data
-                loop.run_until_complete(self._cache.resort())
-
-                return self._cache
-
-            # Run our sync initialization that avoids lock conflicts
-            return sync_initialize_cache()
+            last_progress_time = time.time()
+            last_progress_percent = 0
+
+            async def progress_callback(processed_files: int, expected_total: int) -> None:
+                nonlocal last_progress_time, last_progress_percent
+
+                if expected_total <= 0:
+                    return
+
+                current_time = time.time()
+                progress_percent = min(99, int(1 + (processed_files / expected_total) * 98))
+
+                if progress_percent <= last_progress_percent:
+                    return
+
+                if current_time - last_progress_time <= 0.5 and processed_files != expected_total:
+                    return
+
+                last_progress_percent = progress_percent
+                last_progress_time = current_time
+
+                await ws_manager.broadcast_init_progress({
+                    'stage': 'process_models',
+                    'progress': progress_percent,
+                    'details': f"Processing {self.model_type} files: {processed_files}/{expected_total}",
+                    'scanner_type': self.model_type,
+                    'pageType': page_type
+                })
+
+            return loop.run_until_complete(
+                self._gather_model_data(
+                    total_files=total_files,
+                    progress_callback=progress_callback
+                )
+            )
         except Exception as e:
             logger.error(f"Error in thread-based {self.model_type} cache initialization: {e}")
+            return None
         finally:
-            # Clean up the event loop
+            asyncio.set_event_loop(None)
             loop.close()

     async def get_cached_data(self, force_refresh: bool = False, rebuild_cache: bool = False) -> ModelCache:
@@ -352,45 +526,16 @@ class ModelScanner:
         self._is_initializing = True  # Set flag
         try:
             start_time = time.time()
-            # Clear existing hash index
-            self._hash_index.clear()
-
-            # Clear existing tags count
-            self._tags_count = {}
-
             # Determine the page type based on model type
-            page_type = 'loras' if self.model_type == 'lora' else 'checkpoints'

             # Scan for new data
-            raw_data = await self.scan_all_models()
+            scan_result = await self._gather_model_data()
+            await self._apply_scan_result(scan_result)
+            await self._save_persistent_cache(scan_result)

-            # Build hash index and tags count
-            for model_data in raw_data:
-                if 'sha256' in model_data and 'file_path' in model_data:
-                    self._hash_index.add_entry(model_data['sha256'].lower(), model_data['file_path'])
-
-                # Count tags
-                if 'tags' in model_data and model_data['tags']:
-                    for tag in model_data['tags']:
-                        self._tags_count[tag] = self._tags_count.get(tag, 0) + 1
-
-            # Log duplicate filename warnings after building the index
-            # duplicate_filenames = self._hash_index.get_duplicate_filenames()
-            # if duplicate_filenames:
-            #     logger.warning(f"Found {len(duplicate_filenames)} filename(s) with duplicates during {self.model_type} cache build:")
-            #     for filename, paths in duplicate_filenames.items():
-            #         logger.warning(f"  Duplicate filename '{filename}': {paths}")
-
-            # Update cache
-            self._cache = ModelCache(
-                raw_data=raw_data,
-                folders=[]
-            )
-
-            # Resort cache
-            await self._cache.resort()
-
-            logger.info(f"{self.model_type.capitalize()} Scanner: Cache initialization completed in {time.time() - start_time:.2f} seconds, found {len(raw_data)} models")
+            logger.info(
+                f"{self.model_type.capitalize()} Scanner: Cache initialization completed in {time.time() - start_time:.2f} seconds, "
+                f"found {len(scan_result.raw_data)} models"
+            )
         except Exception as e:
             logger.error(f"{self.model_type.capitalize()} Scanner: Error initializing cache: {e}")
             # Ensure cache is at least an empty structure on error
@@ -475,8 +620,12 @@ class ModelScanner:
         try:
             # Find the appropriate root path for this file
             root_path = None
-            for potential_root in self.get_model_roots():
-                if path.startswith(potential_root):
+            model_roots = self.get_model_roots()
+            for potential_root in model_roots:
+                # Normalize both paths for comparison
+                normalized_path = os.path.normpath(path)
+                normalized_root = os.path.normpath(potential_root)
+                if normalized_path.startswith(normalized_root):
                     root_path = potential_root
                     break

@@ -534,70 +683,17 @@ class ModelScanner:
             # Update folders list
             all_folders = set(item.get('folder', '') for item in self._cache.raw_data)
             self._cache.folders = sorted(list(all_folders), key=lambda x: x.lower())

             # Resort cache
             await self._cache.resort()

+            await self._persist_current_cache()
+
             logger.info(f"{self.model_type.capitalize()} Scanner: Cache reconciliation completed in {time.time() - start_time:.2f} seconds. Added {total_added}, removed {total_removed} models.")
         except Exception as e:
             logger.error(f"{self.model_type.capitalize()} Scanner: Error reconciling cache: {e}", exc_info=True)
         finally:
             self._is_initializing = False  # Unset flag

-    async def scan_all_models(self) -> List[Dict]:
-        """Scan all model directories and return metadata"""
-        all_models = []
-
-        # Create scan tasks for each directory
-        scan_tasks = []
-        for model_root in self.get_model_roots():
-            task = asyncio.create_task(self._scan_directory(model_root))
-            scan_tasks.append(task)
-
-        # Wait for all tasks to complete
-        for task in scan_tasks:
-            try:
-                models = await task
-                all_models.extend(models)
-            except Exception as e:
-                logger.error(f"Error scanning directory: {e}")
-
-        return all_models
-
-    async def _scan_directory(self, root_path: str) -> List[Dict]:
-        """Scan a single directory for model files"""
-        models = []
-        original_root = root_path  # Save original root path
-
-        async def scan_recursive(path: str, visited_paths: set):
-            """Recursively scan directory, avoiding circular symlinks"""
-            try:
-                real_path = os.path.realpath(path)
-                if real_path in visited_paths:
-                    logger.debug(f"Skipping already visited path: {path}")
-                    return
-                visited_paths.add(real_path)
-
-                with os.scandir(path) as it:
-                    entries = list(it)
-                    for entry in entries:
-                        try:
-                            if entry.is_file(follow_symlinks=True) and any(entry.name.endswith(ext) for ext in self.file_extensions):
-                                file_path = entry.path.replace(os.sep, "/")
-                                result = await self._process_model_file(file_path, original_root)
-                                # Only add to models if result is not None (skip corrupted metadata)
-                                if result:
-                                    models.append(result)
-                                await asyncio.sleep(0)
-                            elif entry.is_dir(follow_symlinks=True):
-                                await scan_recursive(entry.path, visited_paths)
-                        except Exception as e:
-                            logger.error(f"Error processing entry {entry.path}: {e}")
-            except Exception as e:
-                logger.error(f"Error scanning {path}: {e}")
-
-        await scan_recursive(root_path, set())
-        return models
-
     def is_initializing(self) -> bool:
         """Check if the scanner is currently initializing"""
@@ -623,8 +719,18 @@ class ModelScanner:
         """Hook for subclasses: adjust metadata during scanning"""
         return metadata

-    async def _process_model_file(self, file_path: str, root_path: str) -> Dict:
+    async def _process_model_file(
+        self,
+        file_path: str,
+        root_path: str,
+        *,
+        hash_index: Optional[ModelHashIndex] = None,
+        excluded_models: Optional[List[str]] = None
+    ) -> Dict:
         """Process a single model file and return its metadata"""
+        hash_index = hash_index or self._hash_index
+        excluded_models = excluded_models if excluded_models is not None else self._excluded_models
+
         metadata, should_skip = await MetadataManager.load_metadata(file_path, self.model_class)

         if should_skip:
@@ -684,30 +790,134 @@ class ModelScanner:
         # Hook: allow subclasses to adjust metadata
         metadata = self.adjust_metadata(metadata, file_path, root_path)

-        model_data = metadata.to_dict()
-
-        # Skip excluded models
-        if model_data.get('exclude', False):
-            self._excluded_models.append(model_data['file_path'])
-            return None
-
-        # Check for duplicate filename before adding to hash index
-        filename = os.path.splitext(os.path.basename(file_path))[0]
-        existing_hash = self._hash_index.get_hash_by_filename(filename)
-        if existing_hash and existing_hash != model_data.get('sha256', '').lower():
-            existing_path = self._hash_index.get_path(existing_hash)
-            if existing_path and existing_path != file_path:
-                logger.warning(f"Duplicate filename detected: '{filename}' - files: '{existing_path}' and '{file_path}'")
-
         rel_path = os.path.relpath(file_path, root_path)
         folder = os.path.dirname(rel_path)
-        model_data['folder'] = folder.replace(os.path.sep, '/')
+        normalized_folder = folder.replace(os.path.sep, '/')
+
+        model_data = self._build_cache_entry(metadata, folder=normalized_folder)
+
+        # Skip excluded models
+        if model_data.get('exclude', False):
+            excluded_models.append(model_data['file_path'])
+            return None
+
+        # Check for duplicate filename before adding to hash index
+        # filename = os.path.splitext(os.path.basename(file_path))[0]
+        # existing_hash = hash_index.get_hash_by_filename(filename)
+        # if existing_hash and existing_hash != model_data.get('sha256', '').lower():
+        #     existing_path = hash_index.get_path(existing_hash)
+        #     if existing_path and existing_path != file_path:
+        #         logger.warning(f"Duplicate filename detected: '{filename}' - files: '{existing_path}' and '{file_path}'")

         return model_data

+    async def _apply_scan_result(self, scan_result: CacheBuildResult) -> None:
+        """Apply scan results to the cache and associated indexes."""
+
+        if scan_result is None:
+            return
+
+        self._hash_index = scan_result.hash_index
+        self._tags_count = dict(scan_result.tags_count)
+        self._excluded_models = list(scan_result.excluded_models)
+
+        if self._cache is None:
+            self._cache = ModelCache(
+                raw_data=list(scan_result.raw_data),
+                folders=[]
+            )
+        else:
+            self._cache.raw_data = list(scan_result.raw_data)
+
+        await self._cache.resort()
+
+    async def _gather_model_data(
+        self,
+        *,
+        total_files: int = 0,
+        progress_callback: Optional[Callable[[int, int], Awaitable[None]]] = None
+    ) -> CacheBuildResult:
+        """Collect metadata for all model files."""
+
+        raw_data: List[Dict] = []
+        hash_index = ModelHashIndex()
+        tags_count: Dict[str, int] = {}
+        excluded_models: List[str] = []
+        processed_files = 0
+
+        async def handle_progress() -> None:
+            if progress_callback is None:
+                return
+            try:
+                await progress_callback(processed_files, total_files)
+            except Exception as exc:  # pragma: no cover - defensive logging
+                logger.error(f"Error reporting progress for {self.model_type}: {exc}")
+
+        async def scan_recursive(current_path: str, root_path: str, visited_paths: Set[str]) -> None:
+            nonlocal processed_files
+
+            try:
+                real_path = os.path.realpath(current_path)
+                if real_path in visited_paths:
+                    return
+                visited_paths.add(real_path)
+
+                with os.scandir(current_path) as iterator:
+                    entries = list(iterator)
+
+                    for entry in entries:
+                        try:
+                            if entry.is_file(follow_symlinks=True):
+                                ext = os.path.splitext(entry.name)[1].lower()
+                                if ext not in self.file_extensions:
+                                    continue
+
+                                file_path = entry.path.replace(os.sep, "/")
+                                result = await self._process_model_file(
+                                    file_path,
+                                    root_path,
+                                    hash_index=hash_index,
+                                    excluded_models=excluded_models
+                                )
+
+                                processed_files += 1
+
+                                if result:
+                                    raw_data.append(result)
+
+                                    sha_value = result.get('sha256')
+                                    model_path = result.get('file_path')
+                                    if sha_value and model_path:
+                                        hash_index.add_entry(sha_value.lower(), model_path)
+
+                                    for tag in result.get('tags') or []:
+                                        tags_count[tag] = tags_count.get(tag, 0) + 1
+
+                                await handle_progress()
+                                await asyncio.sleep(0)
+                            elif entry.is_dir(follow_symlinks=True):
+                                await scan_recursive(entry.path, root_path, visited_paths)
+                        except Exception as entry_error:
+                            logger.error(f"Error processing entry {entry.path}: {entry_error}")
+            except Exception as scan_error:
+                logger.error(f"Error scanning {current_path}: {scan_error}")
+
+        for model_root in self.get_model_roots():
+            if not os.path.exists(model_root):
+                continue
+
+            await scan_recursive(model_root, model_root, set())
+
+        return CacheBuildResult(
+            raw_data=raw_data,
+            hash_index=hash_index,
+            tags_count=tags_count,
+            excluded_models=excluded_models
+        )
+
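Editor's note: `_gather_model_data` invokes the optional awaitable callback after every processed file and leaves throttling to the callback itself, as the websocket reporter in `_initialize_cache_sync` does. A minimal sketch of a custom callback, assuming a scanner instance; `_gather_model_data` is internal, so this is illustrative only:

async def log_progress(processed: int, total: int) -> None:
    # Called once per scanned file; keep it cheap or rate-limit it,
    # as the websocket reporter above does.
    if total > 0 and processed % 100 == 0:
        print(f"scanned {processed}/{total}")

result = await scanner._gather_model_data(total_files=1234, progress_callback=log_progress)
# result is a CacheBuildResult with raw_data, hash_index, tags_count, excluded_models.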
     async def add_model_to_cache(self, metadata_dict: Dict, folder: str = '') -> bool:
         """Add a model to the cache

         Args:
             metadata_dict: The model metadata dictionary
             folder: The relative folder path for the model
@@ -735,6 +945,7 @@ class ModelScanner:

             # Update the hash index
             self._hash_index.add_entry(metadata_dict['sha256'], metadata_dict['file_path'])
+            await self._persist_current_cache()
             return True
         except Exception as e:
             logger.error(f"Error adding model to cache: {e}")
@@ -863,7 +1074,7 @@ class ModelScanner:
     async def update_single_model_cache(self, original_path: str, new_path: str, metadata: Dict) -> bool:
         """Update cache after a model has been moved or modified"""
         cache = await self.get_cached_data()

         existing_item = next((item for item in cache.raw_data if item['file_path'] == original_path), None)
         if existing_item and 'tags' in existing_item:
             for tag in existing_item.get('tags', []):
@@ -875,35 +1086,42 @@ class ModelScanner:
             self._hash_index.remove_by_path(original_path)

         cache.raw_data = [
             item for item in cache.raw_data
             if item['file_path'] != original_path
         ]

+        cache_modified = bool(existing_item) or bool(metadata)
+
         if metadata:
-            if original_path == new_path:
-                existing_folder = next((item['folder'] for item in cache.raw_data
-                                        if item['file_path'] == original_path), None)
-                if existing_folder:
-                    metadata['folder'] = existing_folder
-                else:
-                    metadata['folder'] = self._calculate_folder(new_path)
+            normalized_new_path = new_path.replace(os.sep, '/')
+            if original_path == new_path and existing_item:
+                folder_value = existing_item.get('folder', self._calculate_folder(new_path))
             else:
-                metadata['folder'] = self._calculate_folder(new_path)
+                folder_value = self._calculate_folder(new_path)

-            cache.raw_data.append(metadata)
-
-            if 'sha256' in metadata:
-                self._hash_index.add_entry(metadata['sha256'].lower(), new_path)
+            cache_entry = self._build_cache_entry(
+                metadata,
+                folder=folder_value,
+                file_path_override=normalized_new_path,
+            )
+
+            cache.raw_data.append(cache_entry)
+
+            sha_value = cache_entry.get('sha256')
+            if sha_value:
+                self._hash_index.add_entry(sha_value.lower(), normalized_new_path)

             all_folders = set(item['folder'] for item in cache.raw_data)
             cache.folders = sorted(list(all_folders), key=lambda x: x.lower())

-            if 'tags' in metadata:
-                for tag in metadata.get('tags', []):
-                    self._tags_count[tag] = self._tags_count.get(tag, 0) + 1
+            for tag in cache_entry.get('tags', []):
+                self._tags_count[tag] = self._tags_count.get(tag, 0) + 1

         await cache.resort()

+        if cache_modified:
+            await self._persist_current_cache()
+
         return True

     def has_hash(self, sha256: str) -> bool:
@@ -1005,7 +1223,10 @@ class ModelScanner:
         if self._cache is None:
             return False

-        return await self._cache.update_preview_url(file_path, preview_url, preview_nsfw_level)
+        updated = await self._cache.update_preview_url(file_path, preview_url, preview_nsfw_level)
+        if updated:
+            await self._persist_current_cache()
+        return updated

     async def bulk_delete_models(self, file_paths: List[str]) -> Dict:
         """Delete multiple models and update cache in a batch operation
@@ -1040,10 +1261,8 @@ class ModelScanner:
                 target_dir = os.path.dirname(file_path)
                 file_name = os.path.splitext(os.path.basename(file_path))[0]

-                # Delete all associated files for the model
-                from ..utils.routes_common import ModelRouteUtils
-                deleted_files = await ModelRouteUtils.delete_model_files(
+                deleted_files = await delete_model_artifacts(
                     target_dir,
                     file_name
                 )

@@ -1136,7 +1355,9 @@ class ModelScanner:

             # Resort cache
             await self._cache.resort()

+            await self._persist_current_cache()
+
             return True

         except Exception as e:

py/services/persistent_model_cache.py (new file, 357 lines)
@@ -0,0 +1,357 @@
+import json
+import logging
+import os
+import re
+import sqlite3
+import threading
+from dataclasses import dataclass
+from typing import Dict, List, Optional, Sequence, Tuple
+
+from ..utils.settings_paths import get_settings_dir
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class PersistedCacheData:
+    """Lightweight structure returned by the persistent cache."""
+
+    raw_data: List[Dict]
+    hash_rows: List[Tuple[str, str]]
+    excluded_models: List[str]
+
+
+class PersistentModelCache:
+    """Persist core model metadata and hash index data in SQLite."""
+
+    _DEFAULT_FILENAME = "model_cache.sqlite"
+    _instances: Dict[str, "PersistentModelCache"] = {}
+    _instance_lock = threading.Lock()
+
+    def __init__(self, library_name: str = "default", db_path: Optional[str] = None) -> None:
+        self._library_name = library_name or "default"
+        self._db_path = db_path or self._resolve_default_path(self._library_name)
+        self._db_lock = threading.Lock()
+        self._schema_initialized = False
+        try:
+            directory = os.path.dirname(self._db_path)
+            if directory:
+                os.makedirs(directory, exist_ok=True)
+        except Exception as exc:  # pragma: no cover - defensive guard
+            logger.warning("Could not create cache directory %s: %s", directory, exc)
+        if self.is_enabled():
+            self._initialize_schema()
+
+    @classmethod
+    def get_default(cls, library_name: Optional[str] = None) -> "PersistentModelCache":
+        name = (library_name or "default")
+        with cls._instance_lock:
+            if name not in cls._instances:
+                cls._instances[name] = cls(name)
+            return cls._instances[name]
+
+    def is_enabled(self) -> bool:
+        return os.environ.get("LORA_MANAGER_DISABLE_PERSISTENT_CACHE", "0") != "1"
+
+    def load_cache(self, model_type: str) -> Optional[PersistedCacheData]:
+        if not self.is_enabled():
+            return None
+        if not self._schema_initialized:
+            self._initialize_schema()
+        if not self._schema_initialized:
+            return None
+        try:
+            with self._db_lock:
+                conn = self._connect(readonly=True)
+                try:
+                    rows = conn.execute(
+                        "SELECT file_path, file_name, model_name, folder, size, modified, sha256, base_model,"
+                        " preview_url, preview_nsfw_level, from_civitai, favorite, notes, usage_tips,"
+                        " civitai_id, civitai_model_id, civitai_name, trained_words, exclude, db_checked,"
+                        " last_checked_at"
+                        " FROM models WHERE model_type = ?",
+                        (model_type,),
+                    ).fetchall()
+
+                    if not rows:
+                        return None
+
+                    tags = self._load_tags(conn, model_type)
+                    hash_rows = conn.execute(
+                        "SELECT sha256, file_path FROM hash_index WHERE model_type = ?",
+                        (model_type,),
+                    ).fetchall()
+                    excluded = conn.execute(
+                        "SELECT file_path FROM excluded_models WHERE model_type = ?",
+                        (model_type,),
+                    ).fetchall()
+                finally:
+                    conn.close()
+        except Exception as exc:
+            logger.warning("Failed to load persisted cache for %s: %s", model_type, exc)
+            return None
+
+        raw_data: List[Dict] = []
+        for row in rows:
+            file_path: str = row["file_path"]
+            trained_words = []
+            if row["trained_words"]:
+                try:
+                    trained_words = json.loads(row["trained_words"])
+                except json.JSONDecodeError:
+                    trained_words = []
+
+            civitai: Optional[Dict] = None
+            if any(row[col] is not None for col in ("civitai_id", "civitai_model_id", "civitai_name")):
+                civitai = {}
+                if row["civitai_id"] is not None:
+                    civitai["id"] = row["civitai_id"]
+                if row["civitai_model_id"] is not None:
+                    civitai["modelId"] = row["civitai_model_id"]
+                if row["civitai_name"]:
+                    civitai["name"] = row["civitai_name"]
+                if trained_words:
+                    civitai["trainedWords"] = trained_words
+
+            item = {
+                "file_path": file_path,
+                "file_name": row["file_name"],
+                "model_name": row["model_name"],
+                "folder": row["folder"] or "",
+                "size": row["size"] or 0,
+                "modified": row["modified"] or 0.0,
+                "sha256": row["sha256"] or "",
+                "base_model": row["base_model"] or "",
+                "preview_url": row["preview_url"] or "",
+                "preview_nsfw_level": row["preview_nsfw_level"] or 0,
+                "from_civitai": bool(row["from_civitai"]),
+                "favorite": bool(row["favorite"]),
+                "notes": row["notes"] or "",
+                "usage_tips": row["usage_tips"] or "",
+                "exclude": bool(row["exclude"]),
+                "db_checked": bool(row["db_checked"]),
+                "last_checked_at": row["last_checked_at"] or 0.0,
+                "tags": tags.get(file_path, []),
+                "civitai": civitai,
+            }
+            raw_data.append(item)
+
+        hash_pairs = [(entry["sha256"].lower(), entry["file_path"]) for entry in hash_rows if entry["sha256"]]
+        if not hash_pairs:
+            # Fall back to hashes stored on the model rows
+            for item in raw_data:
+                sha_value = item.get("sha256")
+                if sha_value:
+                    hash_pairs.append((sha_value.lower(), item["file_path"]))
+
+        excluded_paths = [row["file_path"] for row in excluded]
+        return PersistedCacheData(raw_data=raw_data, hash_rows=hash_pairs, excluded_models=excluded_paths)
|
def save_cache(self, model_type: str, raw_data: Sequence[Dict], hash_index: Dict[str, List[str]], excluded_models: Sequence[str]) -> None:
|
||||||
|
if not self.is_enabled():
|
||||||
|
return
|
||||||
|
if not self._schema_initialized:
|
||||||
|
self._initialize_schema()
|
||||||
|
if not self._schema_initialized:
|
||||||
|
return
|
||||||
|
try:
|
||||||
|
with self._db_lock:
|
||||||
|
conn = self._connect()
|
||||||
|
try:
|
||||||
|
conn.execute("PRAGMA foreign_keys = ON")
|
||||||
|
conn.execute("DELETE FROM models WHERE model_type = ?", (model_type,))
|
||||||
|
conn.execute("DELETE FROM model_tags WHERE model_type = ?", (model_type,))
|
||||||
|
conn.execute("DELETE FROM hash_index WHERE model_type = ?", (model_type,))
|
||||||
|
conn.execute("DELETE FROM excluded_models WHERE model_type = ?", (model_type,))
|
||||||
|
|
||||||
|
model_rows = [self._prepare_model_row(model_type, item) for item in raw_data]
|
||||||
|
conn.executemany(self._insert_model_sql(), model_rows)
|
||||||
|
|
||||||
|
tag_rows = []
|
||||||
|
for item in raw_data:
|
||||||
|
file_path = item.get("file_path")
|
||||||
|
if not file_path:
|
||||||
|
continue
|
||||||
|
for tag in item.get("tags") or []:
|
||||||
|
tag_rows.append((model_type, file_path, tag))
|
||||||
|
if tag_rows:
|
||||||
|
conn.executemany(
|
||||||
|
"INSERT INTO model_tags (model_type, file_path, tag) VALUES (?, ?, ?)",
|
||||||
|
tag_rows,
|
||||||
|
)
|
||||||
|
|
||||||
|
hash_rows: List[Tuple[str, str, str]] = []
|
||||||
|
for sha_value, paths in hash_index.items():
|
||||||
|
for path in paths:
|
||||||
|
if not sha_value or not path:
|
||||||
|
continue
|
||||||
|
hash_rows.append((model_type, sha_value.lower(), path))
|
||||||
|
if hash_rows:
|
||||||
|
conn.executemany(
|
||||||
|
"INSERT OR IGNORE INTO hash_index (model_type, sha256, file_path) VALUES (?, ?, ?)",
|
||||||
|
hash_rows,
|
||||||
|
)
|
||||||
|
|
||||||
|
excluded_rows = [(model_type, path) for path in excluded_models]
|
||||||
|
if excluded_rows:
|
||||||
|
conn.executemany(
|
||||||
|
"INSERT OR IGNORE INTO excluded_models (model_type, file_path) VALUES (?, ?)",
|
||||||
|
excluded_rows,
|
||||||
|
)
|
||||||
|
conn.commit()
|
||||||
|
finally:
|
||||||
|
conn.close()
|
||||||
|
except Exception as exc:
|
||||||
|
logger.warning("Failed to persist cache for %s: %s", model_type, exc)
|
||||||
|
|
||||||
|
# Internal helpers -------------------------------------------------
|
||||||
|
|
||||||
|
def _resolve_default_path(self, library_name: str) -> str:
|
||||||
|
override = os.environ.get("LORA_MANAGER_CACHE_DB")
|
||||||
|
if override:
|
||||||
|
return override
|
||||||
|
try:
|
||||||
|
settings_dir = get_settings_dir(create=True)
|
||||||
|
except Exception as exc: # pragma: no cover - defensive guard
|
||||||
|
logger.warning("Falling back to project directory for cache: %s", exc)
|
||||||
|
settings_dir = os.path.dirname(os.path.dirname(self._db_path)) if hasattr(self, "_db_path") else os.getcwd()
|
||||||
|
safe_name = re.sub(r"[^A-Za-z0-9_.-]", "_", library_name or "default")
|
||||||
|
if safe_name.lower() in ("default", ""):
|
||||||
|
legacy_path = os.path.join(settings_dir, self._DEFAULT_FILENAME)
|
||||||
|
if os.path.exists(legacy_path):
|
||||||
|
return legacy_path
|
||||||
|
return os.path.join(settings_dir, "model_cache", f"{safe_name}.sqlite")
|
||||||
|
|
||||||
|
def _initialize_schema(self) -> None:
|
||||||
|
with self._db_lock:
|
||||||
|
if self._schema_initialized:
|
||||||
|
return
|
||||||
|
try:
|
||||||
|
with self._connect() as conn:
|
||||||
|
conn.execute("PRAGMA journal_mode=WAL")
|
||||||
|
conn.execute("PRAGMA foreign_keys = ON")
|
||||||
|
conn.executescript(
|
||||||
|
"""
|
||||||
|
CREATE TABLE IF NOT EXISTS models (
|
||||||
|
model_type TEXT NOT NULL,
|
||||||
|
file_path TEXT NOT NULL,
|
||||||
|
file_name TEXT,
|
||||||
|
model_name TEXT,
|
||||||
|
folder TEXT,
|
||||||
|
size INTEGER,
|
||||||
|
modified REAL,
|
||||||
|
sha256 TEXT,
|
||||||
|
base_model TEXT,
|
||||||
|
preview_url TEXT,
|
||||||
|
preview_nsfw_level INTEGER,
|
||||||
|
from_civitai INTEGER,
|
||||||
|
favorite INTEGER,
|
||||||
|
notes TEXT,
|
||||||
|
usage_tips TEXT,
|
||||||
|
civitai_id INTEGER,
|
||||||
|
civitai_model_id INTEGER,
|
||||||
|
civitai_name TEXT,
|
||||||
|
trained_words TEXT,
|
||||||
|
exclude INTEGER,
|
||||||
|
db_checked INTEGER,
|
||||||
|
last_checked_at REAL,
|
||||||
|
PRIMARY KEY (model_type, file_path)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS model_tags (
|
||||||
|
model_type TEXT NOT NULL,
|
||||||
|
file_path TEXT NOT NULL,
|
||||||
|
tag TEXT NOT NULL,
|
||||||
|
PRIMARY KEY (model_type, file_path, tag)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS hash_index (
|
||||||
|
model_type TEXT NOT NULL,
|
||||||
|
sha256 TEXT NOT NULL,
|
||||||
|
file_path TEXT NOT NULL,
|
||||||
|
PRIMARY KEY (model_type, sha256, file_path)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS excluded_models (
|
||||||
|
model_type TEXT NOT NULL,
|
||||||
|
file_path TEXT NOT NULL,
|
||||||
|
PRIMARY KEY (model_type, file_path)
|
||||||
|
);
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
conn.commit()
|
||||||
|
self._schema_initialized = True
|
||||||
|
except Exception as exc: # pragma: no cover - defensive guard
|
||||||
|
logger.warning("Failed to initialize persistent cache schema: %s", exc)
|
||||||
|
|
||||||
|
def _connect(self, readonly: bool = False) -> sqlite3.Connection:
|
||||||
|
uri = False
|
||||||
|
path = self._db_path
|
||||||
|
if readonly:
|
||||||
|
if not os.path.exists(path):
|
||||||
|
raise FileNotFoundError(path)
|
||||||
|
path = f"file:{path}?mode=ro"
|
||||||
|
uri = True
|
||||||
|
conn = sqlite3.connect(path, check_same_thread=False, uri=uri, detect_types=sqlite3.PARSE_DECLTYPES)
|
||||||
|
conn.row_factory = sqlite3.Row
|
||||||
|
return conn
|
||||||
|
|
||||||
|
def _prepare_model_row(self, model_type: str, item: Dict) -> Tuple:
|
||||||
|
civitai = item.get("civitai") or {}
|
||||||
|
trained_words = civitai.get("trainedWords")
|
||||||
|
if isinstance(trained_words, str):
|
||||||
|
trained_words_json = trained_words
|
||||||
|
elif trained_words is None:
|
||||||
|
trained_words_json = None
|
||||||
|
else:
|
||||||
|
trained_words_json = json.dumps(trained_words)
|
||||||
|
|
||||||
|
return (
|
||||||
|
model_type,
|
||||||
|
item.get("file_path"),
|
||||||
|
item.get("file_name"),
|
||||||
|
item.get("model_name"),
|
||||||
|
item.get("folder"),
|
||||||
|
int(item.get("size") or 0),
|
||||||
|
float(item.get("modified") or 0.0),
|
||||||
|
(item.get("sha256") or "").lower() or None,
|
||||||
|
item.get("base_model"),
|
||||||
|
item.get("preview_url"),
|
||||||
|
int(item.get("preview_nsfw_level") or 0),
|
||||||
|
1 if item.get("from_civitai", True) else 0,
|
||||||
|
1 if item.get("favorite") else 0,
|
||||||
|
item.get("notes"),
|
||||||
|
item.get("usage_tips"),
|
||||||
|
civitai.get("id"),
|
||||||
|
civitai.get("modelId"),
|
||||||
|
civitai.get("name"),
|
||||||
|
trained_words_json,
|
||||||
|
1 if item.get("exclude") else 0,
|
||||||
|
1 if item.get("db_checked") else 0,
|
||||||
|
float(item.get("last_checked_at") or 0.0),
|
||||||
|
)
|
||||||
|
|
||||||
|
def _insert_model_sql(self) -> str:
|
||||||
|
return (
|
||||||
|
"INSERT INTO models (model_type, file_path, file_name, model_name, folder, size, modified, sha256,"
|
||||||
|
" base_model, preview_url, preview_nsfw_level, from_civitai, favorite, notes, usage_tips,"
|
||||||
|
" civitai_id, civitai_model_id, civitai_name, trained_words, exclude, db_checked, last_checked_at)"
|
||||||
|
" VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
|
||||||
|
)
|
||||||
|
|
||||||
|
def _load_tags(self, conn: sqlite3.Connection, model_type: str) -> Dict[str, List[str]]:
|
||||||
|
tag_rows = conn.execute(
|
||||||
|
"SELECT file_path, tag FROM model_tags WHERE model_type = ?",
|
||||||
|
(model_type,),
|
||||||
|
).fetchall()
|
||||||
|
result: Dict[str, List[str]] = {}
|
||||||
|
for row in tag_rows:
|
||||||
|
result.setdefault(row["file_path"], []).append(row["tag"])
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def get_persistent_cache() -> PersistentModelCache:
|
||||||
|
from .settings_manager import settings as settings_service # Local import to avoid cycles
|
||||||
|
|
||||||
|
library_name = settings_service.get_active_library_name()
|
||||||
|
return PersistentModelCache.get_default(library_name)
|
||||||
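A usage sketch may help here: this is a hypothetical round trip through the cache, not code from the repository; the "lora" model type and the item fields are invented for illustration.

cache = get_persistent_cache()
cache.save_cache(
    "lora",
    raw_data=[{"file_path": "/models/loras/a.safetensors", "file_name": "a", "sha256": "ABC123"}],
    hash_index={"abc123": ["/models/loras/a.safetensors"]},
    excluded_models=[],
)
persisted = cache.load_cache("lora")
if persisted is not None:
    # hash_rows come back lowercased, ready to rebuild an in-memory hash index.
    print(len(persisted.raw_data), persisted.hash_rows)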
168  py/services/preview_asset_service.py  Normal file
@@ -0,0 +1,168 @@
"""Service for processing preview assets for models."""

from __future__ import annotations

import logging
import os
from typing import Awaitable, Callable, Dict, Optional, Sequence

from ..utils.constants import CARD_PREVIEW_WIDTH, PREVIEW_EXTENSIONS

logger = logging.getLogger(__name__)


class PreviewAssetService:
    """Manage fetching and persisting preview assets."""

    def __init__(
        self,
        *,
        metadata_manager,
        downloader_factory: Callable[[], Awaitable],
        exif_utils,
    ) -> None:
        self._metadata_manager = metadata_manager
        self._downloader_factory = downloader_factory
        self._exif_utils = exif_utils

    async def ensure_preview_for_metadata(
        self,
        metadata_path: str,
        local_metadata: Dict[str, object],
        images: Sequence[Dict[str, object]] | None,
    ) -> None:
        """Ensure preview assets exist for the supplied metadata entry."""

        if local_metadata.get("preview_url") and os.path.exists(
            str(local_metadata["preview_url"])
        ):
            return

        if not images:
            return

        first_preview = images[0]
        base_name = os.path.splitext(os.path.splitext(os.path.basename(metadata_path))[0])[0]
        preview_dir = os.path.dirname(metadata_path)
        is_video = first_preview.get("type") == "video"

        if is_video:
            extension = ".mp4"
            preview_path = os.path.join(preview_dir, base_name + extension)
            downloader = await self._downloader_factory()
            success, result = await downloader.download_file(
                first_preview["url"], preview_path, use_auth=False
            )
            if success:
                local_metadata["preview_url"] = preview_path.replace(os.sep, "/")
                local_metadata["preview_nsfw_level"] = first_preview.get("nsfwLevel", 0)
        else:
            extension = ".webp"
            preview_path = os.path.join(preview_dir, base_name + extension)
            downloader = await self._downloader_factory()
            success, content, _headers = await downloader.download_to_memory(
                first_preview["url"], use_auth=False
            )
            if not success:
                return

            try:
                optimized_data, _ = self._exif_utils.optimize_image(
                    image_data=content,
                    target_width=CARD_PREVIEW_WIDTH,
                    format="webp",
                    quality=85,
                    preserve_metadata=False,
                )
                with open(preview_path, "wb") as handle:
                    handle.write(optimized_data)
            except Exception as exc:  # pragma: no cover - defensive path
                logger.error("Error optimizing preview image: %s", exc)
                try:
                    with open(preview_path, "wb") as handle:
                        handle.write(content)
                except Exception as save_exc:
                    logger.error("Error saving preview image: %s", save_exc)
                    return

            local_metadata["preview_url"] = preview_path.replace(os.sep, "/")
            local_metadata["preview_nsfw_level"] = first_preview.get("nsfwLevel", 0)

    async def replace_preview(
        self,
        *,
        model_path: str,
        preview_data: bytes,
        content_type: str,
        original_filename: Optional[str],
        nsfw_level: int,
        update_preview_in_cache: Callable[[str, str, int], Awaitable[bool]],
        metadata_loader: Callable[[str], Awaitable[Dict[str, object]]],
    ) -> Dict[str, object]:
        """Replace an existing preview asset for a model."""

        base_name = os.path.splitext(os.path.basename(model_path))[0]
        folder = os.path.dirname(model_path)

        extension, optimized_data = await self._convert_preview(
            preview_data, content_type, original_filename
        )

        for ext in PREVIEW_EXTENSIONS:
            existing_preview = os.path.join(folder, base_name + ext)
            if os.path.exists(existing_preview):
                try:
                    os.remove(existing_preview)
                except Exception as exc:  # pragma: no cover - defensive path
                    logger.warning(
                        "Failed to delete existing preview %s: %s", existing_preview, exc
                    )

        preview_path = os.path.join(folder, base_name + extension).replace(os.sep, "/")
        with open(preview_path, "wb") as handle:
            handle.write(optimized_data)

        metadata_path = os.path.splitext(model_path)[0] + ".metadata.json"
        metadata = await metadata_loader(metadata_path)
        metadata["preview_url"] = preview_path
        metadata["preview_nsfw_level"] = nsfw_level
        await self._metadata_manager.save_metadata(model_path, metadata)

        await update_preview_in_cache(model_path, preview_path, nsfw_level)

        return {"preview_path": preview_path, "preview_nsfw_level": nsfw_level}

    async def _convert_preview(
        self, data: bytes, content_type: str, original_filename: Optional[str]
    ) -> tuple[str, bytes]:
        """Convert preview bytes to the persisted representation."""

        if content_type.startswith("video/"):
            extension = self._resolve_video_extension(content_type, original_filename)
            return extension, data

        original_ext = (original_filename or "").lower()
        if original_ext.endswith(".gif") or content_type.lower() == "image/gif":
            return ".gif", data

        optimized_data, _ = self._exif_utils.optimize_image(
            image_data=data,
            target_width=CARD_PREVIEW_WIDTH,
            format="webp",
            quality=85,
            preserve_metadata=False,
        )
        return ".webp", optimized_data

    def _resolve_video_extension(self, content_type: str, original_filename: Optional[str]) -> str:
        """Infer the best extension for a video preview."""

        if original_filename:
            extension = os.path.splitext(original_filename)[1].lower()
            if extension in {".mp4", ".webm", ".mov", ".avi"}:
                return extension

        if "webm" in content_type:
            return ".webm"
        return ".mp4"
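A sketch of how the constructor's dependency-injection seams might be wired; metadata_manager, get_downloader, and ExifUtils below are placeholders for whatever concrete collaborators the caller owns, not names taken from this diff.

service = PreviewAssetService(
    metadata_manager=metadata_manager,  # object exposing save_metadata(...)
    downloader_factory=get_downloader,  # async factory returning a downloader
    exif_utils=ExifUtils,               # utility exposing optimize_image(...)
)
await service.ensure_preview_for_metadata(
    "/models/loras/a.metadata.json",
    local_metadata,
    images=[{"type": "image", "url": "https://example.com/p.png", "nsfwLevel": 0}],
)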
@@ -1,5 +1,5 @@
 import asyncio
-from typing import List, Dict
+from typing import Iterable, List, Dict, Optional
 from dataclasses import dataclass
 from operator import itemgetter
 from natsort import natsorted
@@ -10,77 +10,115 @@ class RecipeCache:
     raw_data: List[Dict]
     sorted_by_name: List[Dict]
     sorted_by_date: List[Dict]
 
     def __post_init__(self):
         self._lock = asyncio.Lock()
 
     async def resort(self, name_only: bool = False):
         """Resort all cached data views"""
         async with self._lock:
-            self.sorted_by_name = natsorted(
-                self.raw_data,
-                key=lambda x: x.get('title', '').lower()  # Case-insensitive sort
-            )
-            if not name_only:
-                self.sorted_by_date = sorted(
-                    self.raw_data,
-                    key=itemgetter('created_date', 'file_path'),
-                    reverse=True
-                )
-
-    async def update_recipe_metadata(self, recipe_id: str, metadata: Dict) -> bool:
+            self._resort_locked(name_only=name_only)
+
+    async def update_recipe_metadata(self, recipe_id: str, metadata: Dict, *, resort: bool = True) -> bool:
         """Update metadata for a specific recipe in all cached data
 
         Args:
             recipe_id: The ID of the recipe to update
             metadata: The new metadata
 
         Returns:
             bool: True if the update was successful, False if the recipe wasn't found
         """
-        # Update in raw_data
-        for item in self.raw_data:
-            if item.get('id') == recipe_id:
-                item.update(metadata)
-                break
-        else:
-            return False  # Recipe not found
-
-        # Resort to reflect changes
-        await self.resort()
-        return True
-
-    async def add_recipe(self, recipe_data: Dict) -> None:
-        """Add a new recipe to the cache
-
-        Args:
-            recipe_data: The recipe data to add
-        """
+        async with self._lock:
+            for item in self.raw_data:
+                if str(item.get('id')) == str(recipe_id):
+                    item.update(metadata)
+                    if resort:
+                        self._resort_locked()
+                    return True
+            return False  # Recipe not found
+
+    async def add_recipe(self, recipe_data: Dict, *, resort: bool = False) -> None:
+        """Add a new recipe to the cache."""
         async with self._lock:
             self.raw_data.append(recipe_data)
-            await self.resort()
+            if resort:
+                self._resort_locked()
 
-    async def remove_recipe(self, recipe_id: str) -> bool:
-        """Remove a recipe from the cache by ID
+    async def remove_recipe(self, recipe_id: str, *, resort: bool = False) -> Optional[Dict]:
+        """Remove a recipe from the cache by ID.
 
         Args:
             recipe_id: The ID of the recipe to remove
 
         Returns:
-            bool: True if the recipe was found and removed, False otherwise
+            The removed recipe data if found, otherwise ``None``.
         """
-        # Find the recipe in raw_data
-        recipe_index = next((i for i, recipe in enumerate(self.raw_data)
-                            if recipe.get('id') == recipe_id), None)
-
-        if recipe_index is None:
-            return False
-
-        # Remove from raw_data
-        self.raw_data.pop(recipe_index)
-
-        # Resort to update sorted lists
-        await self.resort()
-
-        return True
+        async with self._lock:
+            for index, recipe in enumerate(self.raw_data):
+                if str(recipe.get('id')) == str(recipe_id):
+                    removed = self.raw_data.pop(index)
+                    if resort:
+                        self._resort_locked()
+                    return removed
+            return None
+
+    async def bulk_remove(self, recipe_ids: Iterable[str], *, resort: bool = False) -> List[Dict]:
+        """Remove multiple recipes from the cache."""
+
+        id_set = {str(recipe_id) for recipe_id in recipe_ids}
+        if not id_set:
+            return []
+
+        async with self._lock:
+            removed = [item for item in self.raw_data if str(item.get('id')) in id_set]
+            if not removed:
+                return []
+
+            self.raw_data = [item for item in self.raw_data if str(item.get('id')) not in id_set]
+            if resort:
+                self._resort_locked()
+            return removed
+
+    async def replace_recipe(self, recipe_id: str, new_data: Dict, *, resort: bool = False) -> bool:
+        """Replace cached data for a recipe."""
+
+        async with self._lock:
+            for index, recipe in enumerate(self.raw_data):
+                if str(recipe.get('id')) == str(recipe_id):
+                    self.raw_data[index] = new_data
+                    if resort:
+                        self._resort_locked()
+                    return True
+            return False
+
+    async def get_recipe(self, recipe_id: str) -> Optional[Dict]:
+        """Return a shallow copy of a cached recipe."""
+
+        async with self._lock:
+            for recipe in self.raw_data:
+                if str(recipe.get('id')) == str(recipe_id):
+                    return dict(recipe)
+            return None
+
+    async def snapshot(self) -> List[Dict]:
+        """Return a copy of all cached recipes."""
+
+        async with self._lock:
+            return [dict(item) for item in self.raw_data]
+
+    def _resort_locked(self, *, name_only: bool = False) -> None:
+        """Sort cached views. Caller must hold ``_lock``."""
+
+        self.sorted_by_name = natsorted(
+            self.raw_data,
+            key=lambda x: x.get('title', '').lower()
+        )
+        if not name_only:
+            self.sorted_by_date = sorted(
+                self.raw_data,
+                key=itemgetter('created_date', 'file_path'),
+                reverse=True
+            )
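The resort keyword on the mutators above lets a caller batch several cache edits and pay the sorting cost once at the end. A minimal sketch, assuming an existing RecipeCache instance and three placeholder recipe dicts:

for data in (recipe_a, recipe_b, recipe_c):
    await cache.add_recipe(data, resort=False)  # defer sorting per item
await cache.resort()                            # sort both views a single time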
@@ -3,13 +3,14 @@ import logging
 import asyncio
 import json
 import time
-from typing import List, Dict, Optional, Any, Tuple
+from typing import Any, Dict, Iterable, List, Optional, Set, Tuple
 from ..config import config
 from .recipe_cache import RecipeCache
 from .service_registry import ServiceRegistry
 from .lora_scanner import LoraScanner
 from .metadata_service import get_default_metadata_provider
-from ..utils.utils import fuzzy_match
+from .recipes.errors import RecipeNotFoundError
+from ..utils.utils import calculate_recipe_fingerprint, fuzzy_match
 from natsort import natsorted
 import sys
@@ -46,9 +47,36 @@ class RecipeScanner:
         self._initialization_lock = asyncio.Lock()
         self._initialization_task: Optional[asyncio.Task] = None
         self._is_initializing = False
+        self._mutation_lock = asyncio.Lock()
+        self._resort_tasks: Set[asyncio.Task] = set()
         if lora_scanner:
             self._lora_scanner = lora_scanner
             self._initialized = True
 
+    def on_library_changed(self) -> None:
+        """Reset cached state when the active library changes."""
+
+        # Cancel any in-flight initialization or resorting work so the next
+        # access rebuilds the cache for the new library.
+        if self._initialization_task and not self._initialization_task.done():
+            self._initialization_task.cancel()
+
+        for task in list(self._resort_tasks):
+            if not task.done():
+                task.cancel()
+        self._resort_tasks.clear()
+
+        self._cache = None
+        self._initialization_task = None
+        self._is_initializing = False
+
+        try:
+            loop = asyncio.get_running_loop()
+        except RuntimeError:
+            loop = None
+
+        if loop and not loop.is_closed():
+            loop.create_task(self.initialize_in_background())
+
     async def _get_civitai_client(self):
         """Lazily initialize CivitaiClient from registry"""
@@ -191,6 +219,22 @@ class RecipeScanner:
             # Clean up the event loop
             loop.close()
 
+    def _schedule_resort(self, *, name_only: bool = False) -> None:
+        """Schedule a background resort of the recipe cache."""
+
+        if not self._cache:
+            return
+
+        async def _resort_wrapper() -> None:
+            try:
+                await self._cache.resort(name_only=name_only)
+            except Exception as exc:  # pragma: no cover - defensive logging
+                logger.error("Recipe Scanner: error resorting cache: %s", exc, exc_info=True)
+
+        task = asyncio.create_task(_resort_wrapper())
+        self._resort_tasks.add(task)
+        task.add_done_callback(lambda finished: self._resort_tasks.discard(finished))
+
     @property
     def recipes_dir(self) -> str:
         """Get path to recipes directory"""
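_schedule_resort follows the standard fire-and-forget pattern for asyncio: keep a strong reference to each task so the event loop cannot garbage-collect it mid-flight, and discard the reference on completion so the set does not grow forever. The same shape in isolation:

import asyncio

background_tasks: set[asyncio.Task] = set()

def schedule(coro) -> None:
    # Hold a reference so the task is not dropped before it finishes.
    task = asyncio.create_task(coro)
    background_tasks.add(task)
    task.add_done_callback(background_tasks.discard)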
@@ -255,7 +299,45 @@ class RecipeScanner:
 
         # Return the cache (may be empty or partially initialized)
         return self._cache or RecipeCache(raw_data=[], sorted_by_name=[], sorted_by_date=[])
 
+    async def refresh_cache(self, force: bool = False) -> RecipeCache:
+        """Public helper to refresh or return the recipe cache."""
+
+        return await self.get_cached_data(force_refresh=force)
+
+    async def add_recipe(self, recipe_data: Dict[str, Any]) -> None:
+        """Add a recipe to the in-memory cache."""
+
+        if not recipe_data:
+            return
+
+        cache = await self.get_cached_data()
+        await cache.add_recipe(recipe_data, resort=False)
+        self._schedule_resort()
+
+    async def remove_recipe(self, recipe_id: str) -> bool:
+        """Remove a recipe from the cache by ID."""
+
+        if not recipe_id:
+            return False
+
+        cache = await self.get_cached_data()
+        removed = await cache.remove_recipe(recipe_id, resort=False)
+        if removed is None:
+            return False
+
+        self._schedule_resort()
+        return True
+
+    async def bulk_remove(self, recipe_ids: Iterable[str]) -> int:
+        """Remove multiple recipes from the cache."""
+
+        cache = await self.get_cached_data()
+        removed = await cache.bulk_remove(recipe_ids, resort=False)
+        if removed:
+            self._schedule_resort()
+        return len(removed)
+
     async def scan_all_recipes(self) -> List[Dict]:
         """Scan all recipe JSON files and return metadata"""
         recipes = []
@@ -326,7 +408,6 @@ class RecipeScanner:
 
         # Calculate and update fingerprint if missing
         if 'loras' in recipe_data and 'fingerprint' not in recipe_data:
-            from ..utils.utils import calculate_recipe_fingerprint
             fingerprint = calculate_recipe_fingerprint(recipe_data['loras'])
             recipe_data['fingerprint'] = fingerprint
 
@@ -368,27 +449,29 @@ class RecipeScanner:
             # If has modelVersionId but no hash, look in lora cache first, then fetch from Civitai
             if 'modelVersionId' in lora and not lora.get('hash'):
                 model_version_id = lora['modelVersionId']
-                # Try to find in lora cache first
-                hash_from_cache = await self._find_hash_in_lora_cache(model_version_id)
-                if hash_from_cache:
-                    lora['hash'] = hash_from_cache
-                    metadata_updated = True
-                else:
-                    # If not in cache, fetch from Civitai
-                    result = await self._get_hash_from_civitai(model_version_id)
-                    if isinstance(result, tuple):
-                        hash_from_civitai, is_deleted = result
-                        if hash_from_civitai:
-                            lora['hash'] = hash_from_civitai
-                            metadata_updated = True
-                        elif is_deleted:
-                            # Mark the lora as deleted if it was not found on Civitai
-                            lora['isDeleted'] = True
-                            logger.warning(f"Marked lora with modelVersionId {model_version_id} as deleted")
-                            metadata_updated = True
-                    else:
-                        logger.debug(f"Could not get hash for modelVersionId {model_version_id}")
+                # Check if model_version_id is an integer and > 0
+                if isinstance(model_version_id, int) and model_version_id > 0:
+                    # Try to find in lora cache first
+                    hash_from_cache = await self._find_hash_in_lora_cache(model_version_id)
+                    if hash_from_cache:
+                        lora['hash'] = hash_from_cache
+                        metadata_updated = True
+                    else:
+                        # If not in cache, fetch from Civitai
+                        result = await self._get_hash_from_civitai(model_version_id)
+                        if isinstance(result, tuple):
+                            hash_from_civitai, is_deleted = result
+                            if hash_from_civitai:
+                                lora['hash'] = hash_from_civitai
+                                metadata_updated = True
+                            elif is_deleted:
+                                # Mark the lora as deleted if it was not found on Civitai
+                                lora['isDeleted'] = True
+                                logger.warning(f"Marked lora with modelVersionId {model_version_id} as deleted")
+                                metadata_updated = True
+                        else:
+                            logger.debug(f"Could not get hash for modelVersionId {model_version_id}")
 
             # If has hash but no file_name, look up in lora library
             if 'hash' in lora and (not lora.get('file_name') or not lora['file_name']):
@@ -497,9 +580,36 @@ class RecipeScanner:
             logger.error(f"Error getting base model for lora: {e}")
             return None
 
+    def _enrich_lora_entry(self, lora: Dict[str, Any]) -> Dict[str, Any]:
+        """Populate convenience fields for a LoRA entry."""
+
+        if not lora or not self._lora_scanner:
+            return lora
+
+        hash_value = (lora.get('hash') or '').lower()
+        if not hash_value:
+            return lora
+
+        try:
+            lora['inLibrary'] = self._lora_scanner.has_hash(hash_value)
+            lora['preview_url'] = self._lora_scanner.get_preview_url_by_hash(hash_value)
+            lora['localPath'] = self._lora_scanner.get_path_by_hash(hash_value)
+        except Exception as exc:  # pragma: no cover - defensive logging
+            logger.debug("Error enriching lora entry %s: %s", hash_value, exc)
+
+        return lora
+
+    async def get_local_lora(self, name: str) -> Optional[Dict[str, Any]]:
+        """Lookup a local LoRA model by name."""
+
+        if not self._lora_scanner or not name:
+            return None
+
+        return await self._lora_scanner.get_model_info_by_name(name)
+
     async def get_paginated_data(self, page: int, page_size: int, sort_by: str = 'date', search: str = None, filters: dict = None, search_options: dict = None, lora_hash: str = None, bypass_filters: bool = True):
         """Get paginated and filtered recipe data
 
         Args:
             page: Current page number (1-based)
             page_size: Number of items per page
@@ -598,16 +708,12 @@ class RecipeScanner:
 
         # Get paginated items
         paginated_items = filtered_data[start_idx:end_idx]
 
         # Add inLibrary information for each lora
        for item in paginated_items:
             if 'loras' in item:
-                for lora in item['loras']:
-                    if 'hash' in lora and lora['hash']:
-                        lora['inLibrary'] = self._lora_scanner.has_hash(lora['hash'].lower())
-                        lora['preview_url'] = self._lora_scanner.get_preview_url_by_hash(lora['hash'].lower())
-                        lora['localPath'] = self._lora_scanner.get_path_by_hash(lora['hash'].lower())
-
+                item['loras'] = [self._enrich_lora_entry(dict(lora)) for lora in item['loras']]
+
         result = {
             'items': paginated_items,
             'total': total_items,
@@ -653,33 +759,25 @@ class RecipeScanner:
 
         # Add lora metadata
         if 'loras' in formatted_recipe:
-            for lora in formatted_recipe['loras']:
-                if 'hash' in lora and lora['hash']:
-                    lora_hash = lora['hash'].lower()
-                    lora['inLibrary'] = self._lora_scanner.has_hash(lora_hash)
-                    lora['preview_url'] = self._lora_scanner.get_preview_url_by_hash(lora_hash)
-                    lora['localPath'] = self._lora_scanner.get_path_by_hash(lora_hash)
-
+            formatted_recipe['loras'] = [self._enrich_lora_entry(dict(lora)) for lora in formatted_recipe['loras']]
+
         return formatted_recipe
 
     def _format_file_url(self, file_path: str) -> str:
         """Format file path as URL for serving in web UI"""
         if not file_path:
             return '/loras_static/images/no-preview.png'
 
         try:
-            # Format file path as a URL that will work with static file serving
-            recipes_dir = os.path.join(config.loras_roots[0], "recipes").replace(os.sep, '/')
-            if file_path.replace(os.sep, '/').startswith(recipes_dir):
-                relative_path = os.path.relpath(file_path, config.loras_roots[0]).replace(os.sep, '/')
-                return f"/loras_static/root1/preview/{relative_path}"
-
-            # If not in recipes dir, try to create a valid URL from the file name
-            file_name = os.path.basename(file_path)
-            return f"/loras_static/root1/preview/recipes/{file_name}"
+            normalized_path = os.path.normpath(file_path)
+            static_url = config.get_preview_static_url(normalized_path)
+            if static_url:
+                return static_url
         except Exception as e:
             logger.error(f"Error formatting file URL: {e}")
             return '/loras_static/images/no-preview.png'
 
+        return '/loras_static/images/no-preview.png'
+
     def _format_timestamp(self, timestamp: float) -> str:
         """Format timestamp for display"""
@@ -717,26 +815,159 @@ class RecipeScanner:
             # Save updated recipe
             with open(recipe_json_path, 'w', encoding='utf-8') as f:
                 json.dump(recipe_data, f, indent=4, ensure_ascii=False)
 
             # Update the cache if it exists
             if self._cache is not None:
-                await self._cache.update_recipe_metadata(recipe_id, metadata)
+                await self._cache.update_recipe_metadata(recipe_id, metadata, resort=False)
+                self._schedule_resort()
 
             # If the recipe has an image, update its EXIF metadata
             from ..utils.exif_utils import ExifUtils
             image_path = recipe_data.get('file_path')
             if image_path and os.path.exists(image_path):
                 ExifUtils.append_recipe_metadata(image_path, recipe_data)
 
             return True
         except Exception as e:
             import logging
             logging.getLogger(__name__).error(f"Error updating recipe metadata: {e}", exc_info=True)
             return False
 
+    async def update_lora_entry(
+        self,
+        recipe_id: str,
+        lora_index: int,
+        *,
+        target_name: str,
+        target_lora: Optional[Dict[str, Any]] = None,
+    ) -> Tuple[Dict[str, Any], Dict[str, Any]]:
+        """Update a specific LoRA entry within a recipe.
+
+        Returns the updated recipe data and the refreshed LoRA metadata.
+        """
+
+        if target_name is None:
+            raise ValueError("target_name must be provided")
+
+        recipe_json_path = os.path.join(self.recipes_dir, f"{recipe_id}.recipe.json")
+        if not os.path.exists(recipe_json_path):
+            raise RecipeNotFoundError("Recipe not found")
+
+        async with self._mutation_lock:
+            with open(recipe_json_path, 'r', encoding='utf-8') as file_obj:
+                recipe_data = json.load(file_obj)
+
+            loras = recipe_data.get('loras', [])
+            if lora_index >= len(loras):
+                raise RecipeNotFoundError("LoRA index out of range in recipe")
+
+            lora_entry = loras[lora_index]
+            lora_entry['isDeleted'] = False
+            lora_entry['exclude'] = False
+            lora_entry['file_name'] = target_name
+
+            if target_lora is not None:
+                sha_value = target_lora.get('sha256') or target_lora.get('sha')
+                if sha_value:
+                    lora_entry['hash'] = sha_value.lower()
+
+                civitai_info = target_lora.get('civitai') or {}
+                if civitai_info:
+                    lora_entry['modelName'] = civitai_info.get('model', {}).get('name', '')
+                    lora_entry['modelVersionName'] = civitai_info.get('name', '')
+                    lora_entry['modelVersionId'] = civitai_info.get('id')
+
+            recipe_data['fingerprint'] = calculate_recipe_fingerprint(recipe_data.get('loras', []))
+            recipe_data['modified'] = time.time()
+
+            with open(recipe_json_path, 'w', encoding='utf-8') as file_obj:
+                json.dump(recipe_data, file_obj, indent=4, ensure_ascii=False)
+
+            cache = await self.get_cached_data()
+            replaced = await cache.replace_recipe(recipe_id, recipe_data, resort=False)
+            if not replaced:
+                await cache.add_recipe(recipe_data, resort=False)
+            self._schedule_resort()
+
+            updated_lora = dict(lora_entry)
+            if target_lora is not None:
+                preview_url = target_lora.get('preview_url')
+                if preview_url:
+                    updated_lora['preview_url'] = config.get_preview_static_url(preview_url)
+                if target_lora.get('file_path'):
+                    updated_lora['localPath'] = target_lora['file_path']
+
+            updated_lora = self._enrich_lora_entry(updated_lora)
+            return recipe_data, updated_lora
+
+    async def get_recipes_for_lora(self, lora_hash: str) -> List[Dict[str, Any]]:
+        """Return recipes that reference a given LoRA hash."""
+
+        if not lora_hash:
+            return []
+
+        normalized_hash = lora_hash.lower()
+        cache = await self.get_cached_data()
+        matching_recipes: List[Dict[str, Any]] = []
+
+        for recipe in cache.raw_data:
+            loras = recipe.get('loras', [])
+            if any((entry.get('hash') or '').lower() == normalized_hash for entry in loras):
+                recipe_copy = {**recipe}
+                recipe_copy['loras'] = [self._enrich_lora_entry(dict(entry)) for entry in loras]
+                recipe_copy['file_url'] = self._format_file_url(recipe.get('file_path'))
+                matching_recipes.append(recipe_copy)
+
+        return matching_recipes
+
+    async def get_recipe_syntax_tokens(self, recipe_id: str) -> List[str]:
+        """Build LoRA syntax tokens for a recipe."""
+
+        cache = await self.get_cached_data()
+        recipe = await cache.get_recipe(recipe_id)
+        if recipe is None:
+            raise RecipeNotFoundError("Recipe not found")
+
+        loras = recipe.get('loras', [])
+        if not loras:
+            return []
+
+        lora_cache = None
+        if self._lora_scanner is not None:
+            lora_cache = await self._lora_scanner.get_cached_data()
+
+        syntax_parts: List[str] = []
+        for lora in loras:
+            if lora.get('isDeleted', False):
+                continue
+
+            file_name = None
+            hash_value = (lora.get('hash') or '').lower()
+            if hash_value and self._lora_scanner is not None and hasattr(self._lora_scanner, '_hash_index'):
+                file_path = self._lora_scanner._hash_index.get_path(hash_value)
+                if file_path:
+                    file_name = os.path.splitext(os.path.basename(file_path))[0]
+
+            if not file_name and lora.get('modelVersionId') and lora_cache is not None:
+                for cached_lora in getattr(lora_cache, 'raw_data', []):
+                    civitai_info = cached_lora.get('civitai')
+                    if civitai_info and civitai_info.get('id') == lora.get('modelVersionId'):
+                        cached_path = cached_lora.get('path') or cached_lora.get('file_path')
+                        if cached_path:
+                            file_name = os.path.splitext(os.path.basename(cached_path))[0]
+                        break
+
+            if not file_name:
+                file_name = lora.get('file_name', 'unknown-lora')
+
+            strength = lora.get('strength', 1.0)
+            syntax_parts.append(f"<lora:{file_name}:{strength}>")
+
+        return syntax_parts
+
     async def update_lora_filename_by_hash(self, hash_value: str, new_file_name: str) -> Tuple[int, int]:
         """Update file_name in all recipes that contain a LoRA with the specified hash.
 
         Args:
             hash_value: The SHA256 hash value of the LoRA
             new_file_name: The new file_name to set
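For reference, get_recipe_syntax_tokens emits the usual prompt-embedding LoRA syntax; given a recipe with two LoRAs it would return something like the following (the recipe ID, file names, and strengths are invented for illustration):

tokens = await scanner.get_recipe_syntax_tokens("some-recipe-id")
# e.g. ["<lora:detail-tweaker:0.8>", "<lora:style-ghibli:1.0>"]
prompt_suffix = " ".join(tokens)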
23  py/services/recipes/__init__.py  Normal file
@@ -0,0 +1,23 @@
"""Recipe service layer implementations."""

from .analysis_service import RecipeAnalysisService
from .persistence_service import RecipePersistenceService
from .sharing_service import RecipeSharingService
from .errors import (
    RecipeServiceError,
    RecipeValidationError,
    RecipeNotFoundError,
    RecipeDownloadError,
    RecipeConflictError,
)

__all__ = [
    "RecipeAnalysisService",
    "RecipePersistenceService",
    "RecipeSharingService",
    "RecipeServiceError",
    "RecipeValidationError",
    "RecipeNotFoundError",
    "RecipeDownloadError",
    "RecipeConflictError",
]
289
py/services/recipes/analysis_service.py
Normal file
289
py/services/recipes/analysis_service.py
Normal file
@@ -0,0 +1,289 @@
|
|||||||
|
"""Services responsible for recipe metadata analysis."""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import base64
|
||||||
|
import io
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import tempfile
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Any, Callable, Optional
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
from PIL import Image
|
||||||
|
|
||||||
|
from ...utils.utils import calculate_recipe_fingerprint
|
||||||
|
from .errors import (
|
||||||
|
RecipeDownloadError,
|
||||||
|
RecipeNotFoundError,
|
||||||
|
RecipeServiceError,
|
||||||
|
RecipeValidationError,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
|
||||||
|
class AnalysisResult:
|
||||||
|
"""Return payload from analysis operations."""
|
||||||
|
|
||||||
|
payload: dict[str, Any]
|
||||||
|
status: int = 200
|
||||||
|
|
||||||
|
|
||||||
|
class RecipeAnalysisService:
|
||||||
|
"""Extract recipe metadata from various image sources."""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
exif_utils,
|
||||||
|
recipe_parser_factory,
|
||||||
|
downloader_factory: Callable[[], Any],
|
||||||
|
metadata_collector: Optional[Callable[[], Any]] = None,
|
||||||
|
metadata_processor_cls: Optional[type] = None,
|
||||||
|
metadata_registry_cls: Optional[type] = None,
|
||||||
|
standalone_mode: bool = False,
|
||||||
|
logger,
|
||||||
|
) -> None:
|
||||||
|
self._exif_utils = exif_utils
|
||||||
|
self._recipe_parser_factory = recipe_parser_factory
|
||||||
|
self._downloader_factory = downloader_factory
|
||||||
|
self._metadata_collector = metadata_collector
|
||||||
|
self._metadata_processor_cls = metadata_processor_cls
|
||||||
|
self._metadata_registry_cls = metadata_registry_cls
|
||||||
|
self._standalone_mode = standalone_mode
|
||||||
|
self._logger = logger
|
||||||
|
|
||||||
|
async def analyze_uploaded_image(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
image_bytes: bytes | None,
|
||||||
|
recipe_scanner,
|
||||||
|
) -> AnalysisResult:
|
||||||
|
"""Analyze an uploaded image payload."""
|
||||||
|
|
||||||
|
if not image_bytes:
|
||||||
|
raise RecipeValidationError("No image data provided")
|
||||||
|
|
||||||
|
temp_path = self._write_temp_file(image_bytes)
|
||||||
|
try:
|
||||||
|
metadata = self._exif_utils.extract_image_metadata(temp_path)
|
||||||
|
if not metadata:
|
||||||
|
return AnalysisResult({"error": "No metadata found in this image", "loras": []})
|
||||||
|
|
||||||
|
return await self._parse_metadata(
|
||||||
|
metadata,
|
||||||
|
recipe_scanner=recipe_scanner,
|
||||||
|
image_path=None,
|
||||||
|
include_image_base64=False,
|
||||||
|
)
|
||||||
|
finally:
|
||||||
|
self._safe_cleanup(temp_path)
|
||||||
|
|
||||||
|
async def analyze_remote_image(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
url: str | None,
|
||||||
|
recipe_scanner,
|
||||||
|
civitai_client,
|
||||||
|
) -> AnalysisResult:
|
||||||
|
"""Analyze an image accessible via URL, including Civitai integration."""
|
||||||
|
|
||||||
|
if not url:
|
||||||
|
raise RecipeValidationError("No URL provided")
|
||||||
|
|
||||||
|
if civitai_client is None:
|
||||||
|
raise RecipeServiceError("Civitai client unavailable")
|
||||||
|
|
||||||
|
temp_path = self._create_temp_path()
|
||||||
|
metadata: Optional[dict[str, Any]] = None
|
||||||
|
try:
|
||||||
|
civitai_match = re.match(r"https://civitai\.com/images/(\d+)", url)
|
||||||
|
if civitai_match:
|
||||||
|
image_info = await civitai_client.get_image_info(civitai_match.group(1))
|
||||||
|
if not image_info:
|
||||||
|
raise RecipeDownloadError("Failed to fetch image information from Civitai")
|
||||||
|
image_url = image_info.get("url")
|
||||||
|
if not image_url:
|
||||||
|
raise RecipeDownloadError("No image URL found in Civitai response")
|
||||||
|
await self._download_image(image_url, temp_path)
|
||||||
|
metadata = image_info.get("meta") if "meta" in image_info else None
|
||||||
|
else:
|
||||||
|
await self._download_image(url, temp_path)
|
||||||
|
|
||||||
|
if metadata is None:
|
||||||
|
metadata = self._exif_utils.extract_image_metadata(temp_path)
|
||||||
|
|
||||||
|
if not metadata:
|
||||||
|
return self._metadata_not_found_response(temp_path)
|
||||||
|
|
||||||
|
return await self._parse_metadata(
|
||||||
|
metadata,
|
||||||
|
recipe_scanner=recipe_scanner,
|
||||||
|
image_path=temp_path,
|
||||||
|
include_image_base64=True,
|
||||||
|
)
|
||||||
|
finally:
|
||||||
|
self._safe_cleanup(temp_path)
|
||||||
|
|
||||||
|
async def analyze_local_image(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
file_path: str | None,
|
||||||
|
recipe_scanner,
|
||||||
|
) -> AnalysisResult:
|
||||||
|
"""Analyze a file already present on disk."""
|
||||||
|
|
||||||
|
if not file_path:
|
||||||
|
raise RecipeValidationError("No file path provided")
|
||||||
|
|
||||||
|
normalized_path = os.path.normpath(file_path.strip('"').strip("'"))
|
||||||
|
if not os.path.isfile(normalized_path):
|
||||||
|
raise RecipeNotFoundError("File not found")
|
||||||
|
|
||||||
|
metadata = self._exif_utils.extract_image_metadata(normalized_path)
|
||||||
|
if not metadata:
|
||||||
|
return self._metadata_not_found_response(normalized_path)
|
||||||
|
|
||||||
|
return await self._parse_metadata(
|
||||||
|
metadata,
|
||||||
|
recipe_scanner=recipe_scanner,
|
||||||
|
image_path=normalized_path,
|
||||||
|
include_image_base64=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
async def analyze_widget_metadata(self, *, recipe_scanner) -> AnalysisResult:
|
||||||
|
"""Analyse the most recent generation metadata for widget saves."""
|
||||||
|
|
||||||
|
if self._metadata_collector is None or self._metadata_processor_cls is None:
|
||||||
|
raise RecipeValidationError("Metadata collection not available")
|
||||||
|
|
||||||
|
raw_metadata = self._metadata_collector()
|
||||||
|
metadata_dict = self._metadata_processor_cls.to_dict(raw_metadata)
|
||||||
|
if not metadata_dict:
|
||||||
|
raise RecipeValidationError("No generation metadata found")
|
||||||
|
|
||||||
|
        latest_image = None
        if not self._standalone_mode and self._metadata_registry_cls is not None:
            metadata_registry = self._metadata_registry_cls()
            latest_image = metadata_registry.get_first_decoded_image()

        if latest_image is None:
            raise RecipeValidationError(
                "No recent images found to use for recipe. Try generating an image first."
            )

        image_bytes = self._convert_tensor_to_png_bytes(latest_image)
        if image_bytes is None:
            raise RecipeValidationError("Cannot handle this data shape from metadata registry")

        return AnalysisResult(
            {
                "metadata": metadata_dict,
                "image_bytes": image_bytes,
            }
        )

    # Internal helpers -------------------------------------------------

    async def _parse_metadata(
        self,
        metadata: dict[str, Any],
        *,
        recipe_scanner,
        image_path: Optional[str],
        include_image_base64: bool,
    ) -> AnalysisResult:
        parser = self._recipe_parser_factory.create_parser(metadata)
        if parser is None:
            payload = {"error": "No parser found for this image", "loras": []}
            if include_image_base64 and image_path:
                payload["image_base64"] = self._encode_file(image_path)
            return AnalysisResult(payload)

        result = await parser.parse_metadata(metadata, recipe_scanner=recipe_scanner)

        if include_image_base64 and image_path:
            result["image_base64"] = self._encode_file(image_path)

        if "error" in result and not result.get("loras"):
            return AnalysisResult(result)

        fingerprint = calculate_recipe_fingerprint(result.get("loras", []))
        result["fingerprint"] = fingerprint

        matching_recipes: list[str] = []
        if fingerprint:
            matching_recipes = await recipe_scanner.find_recipes_by_fingerprint(fingerprint)
        result["matching_recipes"] = matching_recipes

        return AnalysisResult(result)

    async def _download_image(self, url: str, temp_path: str) -> None:
        downloader = await self._downloader_factory()
        success, result = await downloader.download_file(url, temp_path, use_auth=False)
        if not success:
            raise RecipeDownloadError(f"Failed to download image from URL: {result}")

    def _metadata_not_found_response(self, path: str) -> AnalysisResult:
        payload: dict[str, Any] = {"error": "No metadata found in this image", "loras": []}
        if os.path.exists(path):
            payload["image_base64"] = self._encode_file(path)
        return AnalysisResult(payload)

    def _write_temp_file(self, data: bytes) -> str:
        with tempfile.NamedTemporaryFile(delete=False, suffix=".jpg") as temp_file:
            temp_file.write(data)
            return temp_file.name

    def _create_temp_path(self) -> str:
        with tempfile.NamedTemporaryFile(delete=False, suffix=".jpg") as temp_file:
            return temp_file.name

    def _safe_cleanup(self, path: Optional[str]) -> None:
        if path and os.path.exists(path):
            try:
                os.unlink(path)
            except Exception as exc:  # pragma: no cover - defensive logging
                self._logger.error("Error deleting temporary file: %s", exc)

    def _encode_file(self, path: str) -> str:
        with open(path, "rb") as image_file:
            return base64.b64encode(image_file.read()).decode("utf-8")

    def _convert_tensor_to_png_bytes(self, latest_image: Any) -> Optional[bytes]:
        try:
            if isinstance(latest_image, tuple):
                tensor_image = latest_image[0] if latest_image else None
                if tensor_image is None:
                    return None
            else:
                tensor_image = latest_image

            if hasattr(tensor_image, "shape"):
                self._logger.debug(
                    "Tensor shape: %s, dtype: %s", tensor_image.shape, getattr(tensor_image, "dtype", None)
                )

            import torch  # type: ignore[import-not-found]

            if isinstance(tensor_image, torch.Tensor):
                image_np = tensor_image.cpu().numpy()
            else:
                image_np = np.array(tensor_image)

            while len(image_np.shape) > 3:
                image_np = image_np[0]

            if image_np.dtype in (np.float32, np.float64) and image_np.max() <= 1.0:
                image_np = (image_np * 255).astype(np.uint8)

            if len(image_np.shape) == 3 and image_np.shape[2] == 3:
                pil_image = Image.fromarray(image_np)
                img_byte_arr = io.BytesIO()
                pil_image.save(img_byte_arr, format="PNG")
                return img_byte_arr.getvalue()
        except Exception as exc:  # pragma: no cover - defensive logging path
            self._logger.error("Error processing image data: %s", exc, exc_info=True)
            return None

        return None
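The tensor-to-PNG helper above accepts either a raw tensor or the `(tensor, ...)` tuple shape that ComfyUI nodes commonly emit. A minimal sketch of the shape normalisation it performs, using plain NumPy so it runs without torch; the array values are illustrative, not taken from the repository:

import io

import numpy as np
from PIL import Image

# A batched float image in [0, 1], shape (1, H, W, 3) -- the usual ComfyUI layout.
batched = np.random.rand(1, 64, 64, 3).astype(np.float32)

# Strip batch dimensions until a single (H, W, C) frame remains.
frame = batched
while frame.ndim > 3:
    frame = frame[0]

# Scale float images into the 0-255 byte range PIL expects.
if frame.dtype in (np.float32, np.float64) and frame.max() <= 1.0:
    frame = (frame * 255).astype(np.uint8)

buffer = io.BytesIO()
Image.fromarray(frame).save(buffer, format="PNG")
png_bytes = buffer.getvalue()  # same kind of byte payload the service returns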
22  py/services/recipes/errors.py  Normal file
@@ -0,0 +1,22 @@
"""Shared exceptions for recipe services."""
from __future__ import annotations


class RecipeServiceError(Exception):
    """Base exception for recipe service failures."""


class RecipeValidationError(RecipeServiceError):
    """Raised when a request payload fails validation."""


class RecipeNotFoundError(RecipeServiceError):
    """Raised when a recipe resource cannot be located."""


class RecipeDownloadError(RecipeServiceError):
    """Raised when remote recipe assets cannot be downloaded."""


class RecipeConflictError(RecipeServiceError):
    """Raised when a conflicting recipe state is detected."""
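This hierarchy gives route handlers a single base class to catch. A minimal sketch of mapping the exceptions to HTTP status codes; the mapping values here are illustrative assumptions, not taken from the repository's route code:

# Hypothetical status mapping for the exception hierarchy above.
from py.services.recipes.errors import (
    RecipeConflictError,
    RecipeDownloadError,
    RecipeNotFoundError,
    RecipeServiceError,
    RecipeValidationError,
)

STATUS_BY_ERROR = {
    RecipeValidationError: 400,
    RecipeNotFoundError: 404,
    RecipeConflictError: 409,
    RecipeDownloadError: 502,
}


def status_for(exc: RecipeServiceError) -> int:
    # Fall back to 500 for any other RecipeServiceError subclass.
    return STATUS_BY_ERROR.get(type(exc), 500)


print(status_for(RecipeNotFoundError("missing")))  # 404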
407  py/services/recipes/persistence_service.py  Normal file
@@ -0,0 +1,407 @@
"""Services encapsulating recipe persistence workflows."""
from __future__ import annotations

import base64
import json
import os
import re
import time
import uuid
from dataclasses import dataclass
from typing import Any, Dict, Iterable, Optional

from ...config import config
from ...utils.utils import calculate_recipe_fingerprint
from .errors import RecipeNotFoundError, RecipeValidationError


@dataclass(frozen=True)
class PersistenceResult:
    """Return payload from persistence operations."""

    payload: dict[str, Any]
    status: int = 200


class RecipePersistenceService:
    """Coordinate recipe persistence tasks across storage and caches."""

    def __init__(
        self,
        *,
        exif_utils,
        card_preview_width: int,
        logger,
    ) -> None:
        self._exif_utils = exif_utils
        self._card_preview_width = card_preview_width
        self._logger = logger

    async def save_recipe(
        self,
        *,
        recipe_scanner,
        image_bytes: bytes | None,
        image_base64: str | None,
        name: str | None,
        tags: Iterable[str],
        metadata: Optional[dict[str, Any]],
    ) -> PersistenceResult:
        """Persist a user uploaded recipe."""

        missing_fields = []
        if not name:
            missing_fields.append("name")
        if metadata is None:
            missing_fields.append("metadata")
        if missing_fields:
            raise RecipeValidationError(
                f"Missing required fields: {', '.join(missing_fields)}"
            )

        resolved_image_bytes = self._resolve_image_bytes(image_bytes, image_base64)
        recipes_dir = recipe_scanner.recipes_dir
        os.makedirs(recipes_dir, exist_ok=True)

        recipe_id = str(uuid.uuid4())
        optimized_image, extension = self._exif_utils.optimize_image(
            image_data=resolved_image_bytes,
            target_width=self._card_preview_width,
            format="webp",
            quality=85,
            preserve_metadata=True,
        )
        image_filename = f"{recipe_id}{extension}"
        image_path = os.path.join(recipes_dir, image_filename)
        with open(image_path, "wb") as file_obj:
            file_obj.write(optimized_image)

        current_time = time.time()
        loras_data = [self._normalise_lora_entry(lora) for lora in metadata.get("loras", [])]

        gen_params = metadata.get("gen_params", {})
        if not gen_params and "raw_metadata" in metadata:
            raw_metadata = metadata.get("raw_metadata", {})
            gen_params = {
                "prompt": raw_metadata.get("prompt", ""),
                "negative_prompt": raw_metadata.get("negative_prompt", ""),
                "checkpoint": raw_metadata.get("checkpoint", {}),
                "steps": raw_metadata.get("steps", ""),
                "sampler": raw_metadata.get("sampler", ""),
                "cfg_scale": raw_metadata.get("cfg_scale", ""),
                "seed": raw_metadata.get("seed", ""),
                "size": raw_metadata.get("size", ""),
                "clip_skip": raw_metadata.get("clip_skip", ""),
            }

        fingerprint = calculate_recipe_fingerprint(loras_data)
        recipe_data: Dict[str, Any] = {
            "id": recipe_id,
            "file_path": image_path,
            "title": name,
            "modified": current_time,
            "created_date": current_time,
            "base_model": metadata.get("base_model", ""),
            "loras": loras_data,
            "gen_params": gen_params,
            "fingerprint": fingerprint,
        }

        tags_list = list(tags)
        if tags_list:
            recipe_data["tags"] = tags_list

        if metadata.get("source_path"):
            recipe_data["source_path"] = metadata.get("source_path")

        json_filename = f"{recipe_id}.recipe.json"
        json_path = os.path.join(recipes_dir, json_filename)
        with open(json_path, "w", encoding="utf-8") as file_obj:
            json.dump(recipe_data, file_obj, indent=4, ensure_ascii=False)

        self._exif_utils.append_recipe_metadata(image_path, recipe_data)

        matching_recipes = await self._find_matching_recipes(recipe_scanner, fingerprint, exclude_id=recipe_id)
        await recipe_scanner.add_recipe(recipe_data)

        return PersistenceResult(
            {
                "success": True,
                "recipe_id": recipe_id,
                "image_path": image_path,
                "json_path": json_path,
                "matching_recipes": matching_recipes,
            }
        )

    async def delete_recipe(self, *, recipe_scanner, recipe_id: str) -> PersistenceResult:
        """Delete an existing recipe."""

        recipes_dir = recipe_scanner.recipes_dir
        if not recipes_dir or not os.path.exists(recipes_dir):
            raise RecipeNotFoundError("Recipes directory not found")

        recipe_json_path = os.path.join(recipes_dir, f"{recipe_id}.recipe.json")
        if not os.path.exists(recipe_json_path):
            raise RecipeNotFoundError("Recipe not found")

        with open(recipe_json_path, "r", encoding="utf-8") as file_obj:
            recipe_data = json.load(file_obj)

        image_path = recipe_data.get("file_path")
        os.remove(recipe_json_path)
        if image_path and os.path.exists(image_path):
            os.remove(image_path)

        await recipe_scanner.remove_recipe(recipe_id)
        return PersistenceResult({"success": True, "message": "Recipe deleted successfully"})

    async def update_recipe(self, *, recipe_scanner, recipe_id: str, updates: dict[str, Any]) -> PersistenceResult:
        """Update persisted metadata for a recipe."""

        if not any(key in updates for key in ("title", "tags", "source_path", "preview_nsfw_level")):
            raise RecipeValidationError(
                "At least one field to update must be provided (title or tags or source_path or preview_nsfw_level)"
            )

        success = await recipe_scanner.update_recipe_metadata(recipe_id, updates)
        if not success:
            raise RecipeNotFoundError("Recipe not found or update failed")

        return PersistenceResult({"success": True, "recipe_id": recipe_id, "updates": updates})

    async def reconnect_lora(
        self,
        *,
        recipe_scanner,
        recipe_id: str,
        lora_index: int,
        target_name: str,
    ) -> PersistenceResult:
        """Reconnect a LoRA entry within an existing recipe."""

        recipe_path = os.path.join(recipe_scanner.recipes_dir, f"{recipe_id}.recipe.json")
        if not os.path.exists(recipe_path):
            raise RecipeNotFoundError("Recipe not found")

        target_lora = await recipe_scanner.get_local_lora(target_name)
        if not target_lora:
            raise RecipeNotFoundError(f"Local LoRA not found with name: {target_name}")

        recipe_data, updated_lora = await recipe_scanner.update_lora_entry(
            recipe_id,
            lora_index,
            target_name=target_name,
            target_lora=target_lora,
        )

        image_path = recipe_data.get("file_path")
        if image_path and os.path.exists(image_path):
            self._exif_utils.append_recipe_metadata(image_path, recipe_data)

        matching_recipes = []
        if "fingerprint" in recipe_data:
            matching_recipes = await recipe_scanner.find_recipes_by_fingerprint(recipe_data["fingerprint"])
            if recipe_id in matching_recipes:
                matching_recipes.remove(recipe_id)

        return PersistenceResult(
            {
                "success": True,
                "recipe_id": recipe_id,
                "updated_lora": updated_lora,
                "matching_recipes": matching_recipes,
            }
        )

    async def bulk_delete(
        self,
        *,
        recipe_scanner,
        recipe_ids: Iterable[str],
    ) -> PersistenceResult:
        """Delete multiple recipes in a single request."""

        recipe_ids = list(recipe_ids)
        if not recipe_ids:
            raise RecipeValidationError("No recipe IDs provided")

        recipes_dir = recipe_scanner.recipes_dir
        if not recipes_dir or not os.path.exists(recipes_dir):
            raise RecipeNotFoundError("Recipes directory not found")

        deleted_recipes: list[str] = []
        failed_recipes: list[dict[str, Any]] = []

        for recipe_id in recipe_ids:
            recipe_json_path = os.path.join(recipes_dir, f"{recipe_id}.recipe.json")
            if not os.path.exists(recipe_json_path):
                failed_recipes.append({"id": recipe_id, "reason": "Recipe not found"})
                continue

            try:
                with open(recipe_json_path, "r", encoding="utf-8") as file_obj:
                    recipe_data = json.load(file_obj)
                image_path = recipe_data.get("file_path")
                os.remove(recipe_json_path)
                if image_path and os.path.exists(image_path):
                    os.remove(image_path)
                deleted_recipes.append(recipe_id)
            except Exception as exc:
                failed_recipes.append({"id": recipe_id, "reason": str(exc)})

        if deleted_recipes:
            await recipe_scanner.bulk_remove(deleted_recipes)

        return PersistenceResult(
            {
                "success": True,
                "deleted": deleted_recipes,
                "failed": failed_recipes,
                "total_deleted": len(deleted_recipes),
                "total_failed": len(failed_recipes),
            }
        )

    async def save_recipe_from_widget(
        self,
        *,
        recipe_scanner,
        metadata: dict[str, Any],
        image_bytes: bytes,
    ) -> PersistenceResult:
        """Save a recipe constructed from widget metadata."""

        if not metadata:
            raise RecipeValidationError("No generation metadata found")

        recipes_dir = recipe_scanner.recipes_dir
        os.makedirs(recipes_dir, exist_ok=True)

        recipe_id = str(uuid.uuid4())
        optimized_image, extension = self._exif_utils.optimize_image(
            image_data=image_bytes,
            target_width=self._card_preview_width,
            format="webp",
            quality=85,
            preserve_metadata=True,
        )
        image_filename = f"{recipe_id}{extension}"
        image_path = os.path.join(recipes_dir, image_filename)
        with open(image_path, "wb") as file_obj:
            file_obj.write(optimized_image)

        lora_stack = metadata.get("loras", "")
        lora_matches = re.findall(r"<lora:([^:]+):([^>]+)>", lora_stack)
        if not lora_matches:
            raise RecipeValidationError("No LoRAs found in the generation metadata")

        loras_data = []
        base_model_counts: Dict[str, int] = {}

        for name, strength in lora_matches:
            lora_info = await recipe_scanner.get_local_lora(name)
            lora_data = {
                "file_name": name,
                "strength": float(strength),
                "hash": (lora_info.get("sha256") or "").lower() if lora_info else "",
                "modelVersionId": (lora_info.get("civitai") or {}).get("id", 0) if lora_info else 0,
                "modelName": ((lora_info.get("civitai") or {}).get("model") or {}).get("name", name) if lora_info else "",
                "modelVersionName": (lora_info.get("civitai") or {}).get("name", "") if lora_info else "",
                "isDeleted": False,
                "exclude": False,
            }
            loras_data.append(lora_data)

            if lora_info and "base_model" in lora_info:
                base_model = lora_info["base_model"]
                base_model_counts[base_model] = base_model_counts.get(base_model, 0) + 1

        recipe_name = self._derive_recipe_name(lora_matches)
        most_common_base_model = (
            max(base_model_counts.items(), key=lambda item: item[1])[0] if base_model_counts else ""
        )

        recipe_data = {
            "id": recipe_id,
            "file_path": image_path,
            "title": recipe_name,
            "modified": time.time(),
            "created_date": time.time(),
            "base_model": most_common_base_model,
            "loras": loras_data,
            "checkpoint": metadata.get("checkpoint", ""),
            "gen_params": {
                key: value
                for key, value in metadata.items()
                if key not in ["checkpoint", "loras"]
            },
            "loras_stack": lora_stack,
        }

        json_filename = f"{recipe_id}.recipe.json"
        json_path = os.path.join(recipes_dir, json_filename)
        with open(json_path, "w", encoding="utf-8") as file_obj:
            json.dump(recipe_data, file_obj, indent=4, ensure_ascii=False)

        self._exif_utils.append_recipe_metadata(image_path, recipe_data)
        await recipe_scanner.add_recipe(recipe_data)

        return PersistenceResult(
            {
                "success": True,
                "recipe_id": recipe_id,
                "image_path": image_path,
                "json_path": json_path,
                "recipe_name": recipe_name,
            }
        )

    # Helper methods ---------------------------------------------------

    def _resolve_image_bytes(self, image_bytes: bytes | None, image_base64: str | None) -> bytes:
        if image_bytes is not None:
            return image_bytes
        if image_base64:
            try:
                payload = image_base64.split(",", 1)[1] if "," in image_base64 else image_base64
                return base64.b64decode(payload)
            except Exception as exc:  # pragma: no cover - validation guard
                raise RecipeValidationError(f"Invalid base64 image data: {exc}") from exc
        raise RecipeValidationError("No image data provided")

    def _normalise_lora_entry(self, lora: dict[str, Any]) -> dict[str, Any]:
        return {
            "file_name": lora.get("file_name", "")
            or (
                os.path.splitext(os.path.basename(lora.get("localPath", "")))[0]
                if lora.get("localPath")
                else ""
            ),
            "hash": (lora.get("hash") or "").lower(),
            "strength": float(lora.get("weight", 1.0)),
            "modelVersionId": lora.get("id", 0),
            "modelName": lora.get("name", ""),
            "modelVersionName": lora.get("version", ""),
            "isDeleted": lora.get("isDeleted", False),
            "exclude": lora.get("exclude", False),
        }

    async def _find_matching_recipes(
        self,
        recipe_scanner,
        fingerprint: str | None,
        *,
        exclude_id: Optional[str] = None,
    ) -> list[str]:
        if not fingerprint:
            return []
        matches = await recipe_scanner.find_recipes_by_fingerprint(fingerprint)
        if exclude_id and exclude_id in matches:
            matches.remove(exclude_id)
        return matches

    def _derive_recipe_name(self, lora_matches: list[tuple[str, str]]) -> str:
        recipe_name_parts = [f"{name.strip()}-{float(strength):.2f}" for name, strength in lora_matches[:3]]
        recipe_name = "_".join(recipe_name_parts)
        return recipe_name or "recipe"
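A rough sketch of driving `RecipePersistenceService.save_recipe`. The `StubScanner` and `StubExifUtils` doubles below are hypothetical, the import path assumes the plugin package is importable as `py` (adjust to however it is mounted), and the bytes and width values are placeholders:

import asyncio
import logging

from py.services.recipes.persistence_service import RecipePersistenceService


class StubScanner:
    """Hypothetical stand-in for the real RecipeScanner, for illustration only."""

    recipes_dir = "/tmp/recipes"

    async def add_recipe(self, recipe_data):
        print("cached:", recipe_data["id"])  # the service awaits this after writing files

    async def find_recipes_by_fingerprint(self, fingerprint):
        return []  # pretend no duplicate recipes exist


class StubExifUtils:
    """Hypothetical stand-in mirroring the two calls the service makes."""

    def optimize_image(self, *, image_data, target_width, format, quality, preserve_metadata):
        return image_data, ".webp"  # pass the bytes through unchanged

    def append_recipe_metadata(self, image_path, recipe_data):
        pass  # the real implementation embeds the recipe JSON into the image


async def main() -> None:
    service = RecipePersistenceService(
        exif_utils=StubExifUtils(), card_preview_width=480, logger=logging.getLogger("recipes")
    )
    result = await service.save_recipe(
        recipe_scanner=StubScanner(),
        image_bytes=b"placeholder-image-bytes",  # a real caller passes a decoded upload
        image_base64=None,
        name="demo recipe",
        tags=["example"],
        metadata={"loras": [], "base_model": "SDXL"},
    )
    print(result.payload["recipe_id"], result.status)


asyncio.run(main())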
105  py/services/recipes/sharing_service.py  Normal file
@@ -0,0 +1,105 @@
"""Services handling recipe sharing and downloads."""
from __future__ import annotations

import os
import shutil
import tempfile
import time
from dataclasses import dataclass
from typing import Any, Dict

from .errors import RecipeNotFoundError


@dataclass(frozen=True)
class SharingResult:
    """Return payload for share operations."""

    payload: dict[str, Any]
    status: int = 200


@dataclass(frozen=True)
class DownloadInfo:
    """Information required to stream a shared recipe file."""

    file_path: str
    download_filename: str


class RecipeSharingService:
    """Prepare temporary recipe downloads with TTL cleanup."""

    def __init__(self, *, ttl_seconds: int = 300, logger) -> None:
        self._ttl_seconds = ttl_seconds
        self._logger = logger
        self._shared_recipes: Dict[str, Dict[str, Any]] = {}

    async def share_recipe(self, *, recipe_scanner, recipe_id: str) -> SharingResult:
        """Prepare a temporary downloadable copy of a recipe image."""

        recipe = await recipe_scanner.get_recipe_by_id(recipe_id)
        if not recipe:
            raise RecipeNotFoundError("Recipe not found")

        image_path = recipe.get("file_path")
        if not image_path or not os.path.exists(image_path):
            raise RecipeNotFoundError("Recipe image not found")

        ext = os.path.splitext(image_path)[1]
        with tempfile.NamedTemporaryFile(suffix=ext, delete=False) as temp_file:
            temp_path = temp_file.name

        shutil.copy2(image_path, temp_path)
        timestamp = int(time.time())
        self._shared_recipes[recipe_id] = {
            "path": temp_path,
            "timestamp": timestamp,
            "expires": time.time() + self._ttl_seconds,
        }
        self._cleanup_shared_recipes()

        safe_title = recipe.get("title", "").replace(" ", "_").lower()
        filename = f"recipe_{safe_title}{ext}" if safe_title else f"recipe_{recipe_id}{ext}"
        url_path = f"/api/recipe/{recipe_id}/share/download?t={timestamp}"
        return SharingResult({"success": True, "download_url": url_path, "filename": filename})

    async def prepare_download(self, *, recipe_scanner, recipe_id: str) -> DownloadInfo:
        """Return file path and filename for a prepared shared recipe."""

        shared_info = self._shared_recipes.get(recipe_id)
        if not shared_info or time.time() > shared_info.get("expires", 0):
            self._cleanup_entry(recipe_id)
            raise RecipeNotFoundError("Shared recipe not found or expired")

        file_path = shared_info["path"]
        if not os.path.exists(file_path):
            self._cleanup_entry(recipe_id)
            raise RecipeNotFoundError("Shared recipe file not found")

        recipe = await recipe_scanner.get_recipe_by_id(recipe_id)
        filename_base = (
            f"recipe_{recipe.get('title', '').replace(' ', '_').lower()}" if recipe else recipe_id
        )
        ext = os.path.splitext(file_path)[1]
        download_filename = f"{filename_base}{ext}"
        return DownloadInfo(file_path=file_path, download_filename=download_filename)

    def _cleanup_shared_recipes(self) -> None:
        for recipe_id in list(self._shared_recipes.keys()):
            shared = self._shared_recipes.get(recipe_id)
            if not shared:
                continue
            if time.time() > shared.get("expires", 0):
                self._cleanup_entry(recipe_id)

    def _cleanup_entry(self, recipe_id: str) -> None:
        shared_info = self._shared_recipes.pop(recipe_id, None)
        if not shared_info:
            return
        file_path = shared_info.get("path")
        if file_path and os.path.exists(file_path):
            try:
                os.unlink(file_path)
            except Exception as exc:  # pragma: no cover - defensive logging
                self._logger.error("Error cleaning up shared recipe %s: %s", recipe_id, exc)
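A minimal sketch of the TTL behaviour, using a 1-second TTL and a hypothetical stub scanner; the import path and the `/tmp/recipes/demo.webp` file are illustrative assumptions:

import asyncio
import logging
import os

from py.services.recipes.sharing_service import RecipeSharingService
from py.services.recipes.errors import RecipeNotFoundError


class StubScanner:
    async def get_recipe_by_id(self, recipe_id):
        # Pretend the recipe's preview image lives at this illustrative path.
        return {"title": "demo recipe", "file_path": "/tmp/recipes/demo.webp"}


async def main() -> None:
    # Create the placeholder image so share_recipe's existence check passes.
    os.makedirs("/tmp/recipes", exist_ok=True)
    with open("/tmp/recipes/demo.webp", "wb") as f:
        f.write(b"placeholder")

    service = RecipeSharingService(ttl_seconds=1, logger=logging.getLogger("share"))
    result = await service.share_recipe(recipe_scanner=StubScanner(), recipe_id="abc")
    print(result.payload["download_url"])  # /api/recipe/abc/share/download?t=...

    await asyncio.sleep(1.5)  # let the 1-second TTL lapse
    try:
        await service.prepare_download(recipe_scanner=StubScanner(), recipe_id="abc")
    except RecipeNotFoundError as exc:
        print("expired as expected:", exc)


asyncio.run(main())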
@@ -1,14 +1,50 @@
-import os
+import copy
 import json
+import os
 import logging
-from typing import Any, Dict
+from datetime import datetime, timezone
+from typing import Any, Dict, Iterable, List, Mapping, Optional
+
+from ..utils.settings_paths import ensure_settings_file
+
 logger = logging.getLogger(__name__)
+
+
+DEFAULT_SETTINGS: Dict[str, Any] = {
+    "civitai_api_key": "",
+    "language": "en",
+    "show_only_sfw": False,
+    "enable_metadata_archive_db": False,
+    "proxy_enabled": False,
+    "proxy_host": "",
+    "proxy_port": "",
+    "proxy_username": "",
+    "proxy_password": "",
+    "proxy_type": "http",
+    "default_lora_root": "",
+    "default_checkpoint_root": "",
+    "default_embedding_root": "",
+    "base_model_path_mappings": {},
+    "download_path_templates": {},
+    "example_images_path": "",
+    "optimize_example_images": True,
+    "auto_download_example_images": False,
+    "blur_mature_content": True,
+    "autoplay_on_hover": False,
+    "display_density": "default",
+    "card_info_display": "always",
+    "include_trigger_words": False,
+    "compact_mode": False,
+}
+
+
 class SettingsManager:
     def __init__(self):
-        self.settings_file = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'settings.json')
+        self.settings_file = ensure_settings_file(logger)
         self.settings = self._load_settings()
+        self._migrate_setting_keys()
+        self._ensure_default_settings()
+        self._migrate_to_library_registry()
         self._migrate_download_path_template()
         self._auto_set_default_roots()
         self._check_environment_variables()
@@ -23,11 +59,266 @@ class SettingsManager:
             logger.error(f"Error loading settings: {e}")
             return self._get_default_settings()
 
+    def _ensure_default_settings(self) -> None:
+        """Ensure all default settings keys exist"""
+        updated = False
+        for key, value in self._get_default_settings().items():
+            if key not in self.settings:
+                if isinstance(value, dict):
+                    self.settings[key] = value.copy()
+                else:
+                    self.settings[key] = value
+                updated = True
+        if updated:
+            self._save_settings()
+
+    def _migrate_to_library_registry(self) -> None:
+        """Ensure settings include the multi-library registry structure."""
+        libraries = self.settings.get("libraries")
+        active_name = self.settings.get("active_library")
+
+        if not isinstance(libraries, dict) or not libraries:
+            library_name = active_name or "default"
+            library_payload = self._build_library_payload(
+                folder_paths=self.settings.get("folder_paths", {}),
+                default_lora_root=self.settings.get("default_lora_root", ""),
+                default_checkpoint_root=self.settings.get("default_checkpoint_root", ""),
+                default_embedding_root=self.settings.get("default_embedding_root", ""),
+            )
+            libraries = {library_name: library_payload}
+            self.settings["libraries"] = libraries
+            self.settings["active_library"] = library_name
+            self._sync_active_library_to_root(save=False)
+            self._save_settings()
+            return
+
+        sanitized_libraries: Dict[str, Dict[str, Any]] = {}
+        changed = False
+        for name, data in libraries.items():
+            if not isinstance(data, dict):
+                data = {}
+                changed = True
+            payload = self._build_library_payload(
+                folder_paths=data.get("folder_paths"),
+                default_lora_root=data.get("default_lora_root"),
+                default_checkpoint_root=data.get("default_checkpoint_root"),
+                default_embedding_root=data.get("default_embedding_root"),
+                metadata=data.get("metadata"),
+                base=data,
+            )
+            sanitized_libraries[name] = payload
+            if payload is not data:
+                changed = True
+
+        if changed:
+            self.settings["libraries"] = sanitized_libraries
+
+        if not active_name or active_name not in sanitized_libraries:
+            if sanitized_libraries:
+                self.settings["active_library"] = next(iter(sanitized_libraries.keys()))
+            else:
+                self.settings["active_library"] = "default"
+
+        self._sync_active_library_to_root(save=changed)
+
+    def _sync_active_library_to_root(self, *, save: bool = False) -> None:
+        """Update top-level folder path settings to mirror the active library."""
+        libraries = self.settings.get("libraries", {})
+        active_name = self.settings.get("active_library")
+        if not libraries:
+            return
+
+        if active_name not in libraries:
+            active_name = next(iter(libraries.keys()))
+            self.settings["active_library"] = active_name
+
+        active_library = libraries.get(active_name, {})
+        folder_paths = copy.deepcopy(active_library.get("folder_paths", {}))
+        self.settings["folder_paths"] = folder_paths
+        self.settings["default_lora_root"] = active_library.get("default_lora_root", "")
+        self.settings["default_checkpoint_root"] = active_library.get("default_checkpoint_root", "")
+        self.settings["default_embedding_root"] = active_library.get("default_embedding_root", "")
+
+        if save:
+            self._save_settings()
+
+    def _current_timestamp(self) -> str:
+        return datetime.now(timezone.utc).replace(microsecond=0).isoformat()
+
+    def _build_library_payload(
+        self,
+        *,
+        folder_paths: Optional[Mapping[str, Iterable[str]]] = None,
+        default_lora_root: Optional[str] = None,
+        default_checkpoint_root: Optional[str] = None,
+        default_embedding_root: Optional[str] = None,
+        metadata: Optional[Mapping[str, Any]] = None,
+        base: Optional[Mapping[str, Any]] = None,
+    ) -> Dict[str, Any]:
+        payload: Dict[str, Any] = dict(base or {})
+        timestamp = self._current_timestamp()
+
+        if folder_paths is not None:
+            payload["folder_paths"] = self._normalize_folder_paths(folder_paths)
+        else:
+            payload.setdefault("folder_paths", {})
+
+        if default_lora_root is not None:
+            payload["default_lora_root"] = default_lora_root
+        else:
+            payload.setdefault("default_lora_root", "")
+
+        if default_checkpoint_root is not None:
+            payload["default_checkpoint_root"] = default_checkpoint_root
+        else:
+            payload.setdefault("default_checkpoint_root", "")
+
+        if default_embedding_root is not None:
+            payload["default_embedding_root"] = default_embedding_root
+        else:
+            payload.setdefault("default_embedding_root", "")
+
+        if metadata:
+            merged_meta = dict(payload.get("metadata", {}))
+            merged_meta.update(metadata)
+            payload["metadata"] = merged_meta
+
+        payload.setdefault("created_at", timestamp)
+        payload["updated_at"] = timestamp
+        return payload
+
+    def _normalize_folder_paths(
+        self, folder_paths: Mapping[str, Iterable[str]]
+    ) -> Dict[str, List[str]]:
+        normalized: Dict[str, List[str]] = {}
+        for key, values in folder_paths.items():
+            if not isinstance(values, Iterable):
+                continue
+            cleaned: List[str] = []
+            seen = set()
+            for value in values:
+                if not isinstance(value, str):
+                    continue
+                stripped = value.strip()
+                if not stripped:
+                    continue
+                if stripped not in seen:
+                    cleaned.append(stripped)
+                    seen.add(stripped)
+            normalized[key] = cleaned
+        return normalized
+
+    def _validate_folder_paths(
+        self,
+        library_name: str,
+        folder_paths: Mapping[str, Iterable[str]],
+    ) -> None:
+        """Ensure folder paths do not overlap with other libraries."""
+        libraries = self.settings.get("libraries", {})
+        normalized_new: Dict[str, Dict[str, str]] = {}
+        for key, values in folder_paths.items():
+            path_map: Dict[str, str] = {}
+            for value in values:
+                if not isinstance(value, str):
+                    continue
+                stripped = value.strip()
+                if not stripped:
+                    continue
+                normalized_value = os.path.normcase(os.path.normpath(stripped))
+                path_map[normalized_value] = stripped
+            if path_map:
+                normalized_new[key] = path_map
+
+        if not normalized_new:
+            return
+
+        for other_name, other in libraries.items():
+            if other_name == library_name:
+                continue
+            other_paths = other.get("folder_paths", {})
+            for key, new_paths in normalized_new.items():
+                existing = {
+                    os.path.normcase(os.path.normpath(path))
+                    for path in other_paths.get(key, [])
+                    if isinstance(path, str) and path
+                }
+                overlap = existing.intersection(new_paths.keys())
+                if overlap:
+                    collisions = ", ".join(sorted(new_paths[value] for value in overlap))
+                    raise ValueError(
+                        f"Folder path(s) {collisions} already assigned to library '{other_name}'"
+                    )
+
+    def _update_active_library_entry(
+        self,
+        *,
+        folder_paths: Optional[Mapping[str, Iterable[str]]] = None,
+        default_lora_root: Optional[str] = None,
+        default_checkpoint_root: Optional[str] = None,
+        default_embedding_root: Optional[str] = None,
+    ) -> bool:
+        libraries = self.settings.get("libraries", {})
+        active_name = self.settings.get("active_library")
+        if not active_name or active_name not in libraries:
+            return False
+
+        library = libraries[active_name]
+        changed = False
+
+        if folder_paths is not None:
+            normalized_paths = self._normalize_folder_paths(folder_paths)
+            if library.get("folder_paths") != normalized_paths:
+                library["folder_paths"] = normalized_paths
+                changed = True
+
+        if default_lora_root is not None and library.get("default_lora_root") != default_lora_root:
+            library["default_lora_root"] = default_lora_root
+            changed = True
+
+        if default_checkpoint_root is not None and library.get("default_checkpoint_root") != default_checkpoint_root:
+            library["default_checkpoint_root"] = default_checkpoint_root
+            changed = True
+
+        if default_embedding_root is not None and library.get("default_embedding_root") != default_embedding_root:
+            library["default_embedding_root"] = default_embedding_root
+            changed = True
+
+        if changed:
+            library.setdefault("created_at", self._current_timestamp())
+            library["updated_at"] = self._current_timestamp()
+
+        return changed
+
+    def _migrate_setting_keys(self) -> None:
+        """Migrate legacy camelCase setting keys to snake_case"""
+        key_migrations = {
+            'optimizeExampleImages': 'optimize_example_images',
+            'autoDownloadExampleImages': 'auto_download_example_images',
+            'blurMatureContent': 'blur_mature_content',
+            'autoplayOnHover': 'autoplay_on_hover',
+            'displayDensity': 'display_density',
+            'cardInfoDisplay': 'card_info_display',
+            'includeTriggerWords': 'include_trigger_words',
+            'compactMode': 'compact_mode',
+        }
+
+        updated = False
+        for old_key, new_key in key_migrations.items():
+            if old_key in self.settings:
+                if new_key not in self.settings:
+                    self.settings[new_key] = self.settings[old_key]
+                del self.settings[old_key]
+                updated = True
+
+        if updated:
+            logger.info("Migrated legacy setting keys to snake_case")
+            self._save_settings()
+
     def _migrate_download_path_template(self):
         """Migrate old download_path_template to new download_path_templates"""
         old_template = self.settings.get('download_path_template')
         templates = self.settings.get('download_path_templates')
 
         # If old template exists and new templates don't exist, migrate
         if old_template is not None and not templates:
             logger.info("Migrating download_path_template to download_path_templates")
@@ -42,25 +333,36 @@ class SettingsManager:
             logger.info("Migration completed")
 
     def _auto_set_default_roots(self):
-        """Auto set default root paths if only one folder is present and default is empty."""
+        """Auto set default root paths when only one folder is present and the current default is unset or not among the options."""
         folder_paths = self.settings.get('folder_paths', {})
         updated = False
         # loras
         loras = folder_paths.get('loras', [])
-        if isinstance(loras, list) and len(loras) == 1 and not self.settings.get('default_lora_root'):
-            self.settings['default_lora_root'] = loras[0]
-            updated = True
+        if isinstance(loras, list) and len(loras) == 1:
+            current_lora_root = self.settings.get('default_lora_root')
+            if current_lora_root not in loras:
+                self.settings['default_lora_root'] = loras[0]
+                updated = True
         # checkpoints
         checkpoints = folder_paths.get('checkpoints', [])
-        if isinstance(checkpoints, list) and len(checkpoints) == 1 and not self.settings.get('default_checkpoint_root'):
-            self.settings['default_checkpoint_root'] = checkpoints[0]
-            updated = True
+        if isinstance(checkpoints, list) and len(checkpoints) == 1:
+            current_checkpoint_root = self.settings.get('default_checkpoint_root')
+            if current_checkpoint_root not in checkpoints:
+                self.settings['default_checkpoint_root'] = checkpoints[0]
+                updated = True
         # embeddings
        embeddings = folder_paths.get('embeddings', [])
-        if isinstance(embeddings, list) and len(embeddings) == 1 and not self.settings.get('default_embedding_root'):
-            self.settings['default_embedding_root'] = embeddings[0]
-            updated = True
+        if isinstance(embeddings, list) and len(embeddings) == 1:
+            current_embedding_root = self.settings.get('default_embedding_root')
+            if current_embedding_root not in embeddings:
+                self.settings['default_embedding_root'] = embeddings[0]
+                updated = True
         if updated:
+            self._update_active_library_entry(
+                default_lora_root=self.settings.get('default_lora_root'),
+                default_checkpoint_root=self.settings.get('default_checkpoint_root'),
+                default_embedding_root=self.settings.get('default_embedding_root'),
+            )
             self._save_settings()
 
     def _check_environment_variables(self) -> None:
@@ -78,17 +380,11 @@ class SettingsManager:
 
     def _get_default_settings(self) -> Dict[str, Any]:
         """Return default settings"""
-        return {
-            "civitai_api_key": "",
-            "language": "en",
-            "enable_metadata_archive_db": False,  # Enable metadata archive database
-            "proxy_enabled": False,  # Enable app-level proxy
-            "proxy_host": "",  # Proxy host
-            "proxy_port": "",  # Proxy port
-            "proxy_username": "",  # Proxy username (optional)
-            "proxy_password": "",  # Proxy password (optional)
-            "proxy_type": "http"  # Proxy type: http, https, socks4, socks5
-        }
+        defaults = DEFAULT_SETTINGS.copy()
+        # Ensure nested dicts are independent copies
+        defaults['base_model_path_mappings'] = {}
+        defaults['download_path_templates'] = {}
+        return defaults
 
     def get(self, key: str, default: Any = None) -> Any:
         """Get setting value"""
@@ -97,6 +393,14 @@ class SettingsManager:
     def set(self, key: str, value: Any) -> None:
         """Set setting value and save"""
         self.settings[key] = value
+        if key == 'folder_paths' and isinstance(value, Mapping):
+            self._update_active_library_entry(folder_paths=value)  # type: ignore[arg-type]
+        elif key == 'default_lora_root':
+            self._update_active_library_entry(default_lora_root=str(value))
+        elif key == 'default_checkpoint_root':
+            self._update_active_library_entry(default_checkpoint_root=str(value))
+        elif key == 'default_embedding_root':
+            self._update_active_library_entry(default_embedding_root=str(value))
         self._save_settings()
 
     def delete(self, key: str) -> None:
@@ -114,6 +418,227 @@ class SettingsManager:
         except Exception as e:
             logger.error(f"Error saving settings: {e}")
 
+    def get_libraries(self) -> Dict[str, Dict[str, Any]]:
+        """Return a copy of the registered libraries."""
+        libraries = self.settings.get("libraries", {})
+        return copy.deepcopy(libraries)
+
+    def get_active_library_name(self) -> str:
+        """Return the currently active library name."""
+        libraries = self.settings.get("libraries", {})
+        active_name = self.settings.get("active_library")
+        if active_name and active_name in libraries:
+            return active_name
+        if libraries:
+            return next(iter(libraries.keys()))
+        return "default"
+
+    def get_active_library(self) -> Dict[str, Any]:
+        """Return a copy of the active library configuration."""
+        libraries = self.settings.get("libraries", {})
+        active_name = self.get_active_library_name()
+        return copy.deepcopy(libraries.get(active_name, {}))
+
+    def activate_library(self, library_name: str) -> None:
+        """Activate a library by name and refresh dependent services."""
+        libraries = self.settings.get("libraries", {})
+        if library_name not in libraries:
+            raise KeyError(f"Library '{library_name}' does not exist")
+
+        current_active = self.get_active_library_name()
+        if current_active == library_name:
+            # Ensure root settings stay in sync even if already active
+            self._sync_active_library_to_root(save=False)
+            self._save_settings()
+            self._notify_library_change(library_name)
+            return
+
+        self.settings["active_library"] = library_name
+        self._sync_active_library_to_root(save=False)
+        self._save_settings()
+        self._notify_library_change(library_name)
+
+    def upsert_library(
+        self,
+        library_name: str,
+        *,
+        folder_paths: Optional[Mapping[str, Iterable[str]]] = None,
+        default_lora_root: Optional[str] = None,
+        default_checkpoint_root: Optional[str] = None,
+        default_embedding_root: Optional[str] = None,
+        metadata: Optional[Mapping[str, Any]] = None,
+        activate: bool = False,
+    ) -> Dict[str, Any]:
+        """Create or update a library definition."""
+
+        name = library_name.strip()
+        if not name:
+            raise ValueError("Library name cannot be empty")
+
+        if folder_paths is not None:
+            self._validate_folder_paths(name, folder_paths)
+
+        libraries = self.settings.setdefault("libraries", {})
+        existing = libraries.get(name, {})
+
+        payload = self._build_library_payload(
+            folder_paths=folder_paths if folder_paths is not None else existing.get("folder_paths"),
+            default_lora_root=default_lora_root if default_lora_root is not None else existing.get("default_lora_root"),
+            default_checkpoint_root=(
+                default_checkpoint_root
+                if default_checkpoint_root is not None
+                else existing.get("default_checkpoint_root")
+            ),
+            default_embedding_root=(
+                default_embedding_root
+                if default_embedding_root is not None
+                else existing.get("default_embedding_root")
+            ),
+            metadata=metadata if metadata is not None else existing.get("metadata"),
+            base=existing,
+        )
+
+        libraries[name] = payload
+
+        if activate or not self.settings.get("active_library"):
+            self.settings["active_library"] = name
+
+        self._sync_active_library_to_root(save=False)
+        self._save_settings()
+
+        if self.settings.get("active_library") == name:
+            self._notify_library_change(name)
+
+        return payload
+
+    def create_library(
+        self,
+        library_name: str,
+        *,
+        folder_paths: Mapping[str, Iterable[str]],
+        default_lora_root: str = "",
+        default_checkpoint_root: str = "",
+        default_embedding_root: str = "",
+        metadata: Optional[Mapping[str, Any]] = None,
+        activate: bool = False,
+    ) -> Dict[str, Any]:
+        """Create a new library entry."""
+
+        libraries = self.settings.get("libraries", {})
+        if library_name in libraries:
+            raise ValueError(f"Library '{library_name}' already exists")
+
+        return self.upsert_library(
+            library_name,
+            folder_paths=folder_paths,
+            default_lora_root=default_lora_root,
+            default_checkpoint_root=default_checkpoint_root,
+            default_embedding_root=default_embedding_root,
+            metadata=metadata,
+            activate=activate,
+        )
+
+    def rename_library(self, old_name: str, new_name: str) -> None:
+        """Rename an existing library."""
+
+        libraries = self.settings.get("libraries", {})
+        if old_name not in libraries:
+            raise KeyError(f"Library '{old_name}' does not exist")
+        new_name_stripped = new_name.strip()
+        if not new_name_stripped:
+            raise ValueError("New library name cannot be empty")
+        if new_name_stripped in libraries:
+            raise ValueError(f"Library '{new_name_stripped}' already exists")
+
+        libraries[new_name_stripped] = libraries.pop(old_name)
+        if self.settings.get("active_library") == old_name:
+            self.settings["active_library"] = new_name_stripped
+            active_name = new_name_stripped
+        else:
+            active_name = self.settings.get("active_library")
+
+        self._sync_active_library_to_root(save=False)
+        self._save_settings()
+
+        if active_name == new_name_stripped:
+            self._notify_library_change(new_name_stripped)
+
+    def delete_library(self, library_name: str) -> None:
+        """Remove a library definition."""
+
+        libraries = self.settings.get("libraries", {})
+        if library_name not in libraries:
+            raise KeyError(f"Library '{library_name}' does not exist")
+        if len(libraries) == 1:
+            raise ValueError("At least one library must remain")
+
+        was_active = self.settings.get("active_library") == library_name
+        libraries.pop(library_name)
+
+        if was_active:
+            new_active = next(iter(libraries.keys()))
+            self.settings["active_library"] = new_active
+        self._sync_active_library_to_root(save=False)
+        self._save_settings()
+
+        if was_active:
+            self._notify_library_change(self.settings["active_library"])
+
+    def update_active_library_paths(
+        self,
+        folder_paths: Mapping[str, Iterable[str]],
+        *,
+        default_lora_root: Optional[str] = None,
+        default_checkpoint_root: Optional[str] = None,
+        default_embedding_root: Optional[str] = None,
+    ) -> None:
+        """Update folder paths for the active library."""
+
+        active_name = self.get_active_library_name()
+        self.upsert_library(
+            active_name,
+            folder_paths=folder_paths,
+            default_lora_root=default_lora_root,
+            default_checkpoint_root=default_checkpoint_root,
+            default_embedding_root=default_embedding_root,
+            activate=True,
+        )
+
+    def _notify_library_change(self, library_name: str) -> None:
+        """Notify dependent services that the active library changed."""
+        libraries = self.settings.get("libraries", {})
+        library_config = libraries.get(library_name, {})
+        library_snapshot = copy.deepcopy(library_config)
+
+        try:
+            from ..config import config  # Local import to avoid circular dependency
+
+            config.apply_library_settings(library_snapshot)
+        except Exception as exc:  # pragma: no cover - defensive logging
+            logger.debug("Failed to apply library settings to config: %s", exc)
+
+        try:
+            from .service_registry import ServiceRegistry  # type: ignore
+
+            for service_name in (
+                "lora_scanner",
+                "checkpoint_scanner",
+                "embedding_scanner",
+                "recipe_scanner",
+            ):
+                service = ServiceRegistry.get_service_sync(service_name)
+                if service and hasattr(service, "on_library_changed"):
+                    try:
+                        service.on_library_changed()
+                    except Exception as service_exc:  # pragma: no cover - defensive logging
+                        logger.debug(
+                            "Service %s failed to handle library change: %s",
+                            service_name,
+                            service_exc,
+                        )
+        except Exception as exc:  # pragma: no cover - defensive logging
+            logger.debug("Failed to notify services about library change: %s", exc)
+
     def get_download_path_template(self, model_type: str) -> str:
         """Get download path template for specific model type
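For orientation, this is the shape of one entry in `settings["libraries"]` after the migration above runs, followed by typical calls against the new API. The library name, paths, and timestamps are made-up examples, not values from the repository:

# Illustrative only: one entry in settings["libraries"] after migration.
example_library = {
    "workstation": {
        "folder_paths": {
            "loras": ["D:/models/loras"],
            "checkpoints": ["D:/models/checkpoints"],
            "embeddings": ["D:/models/embeddings"],
        },
        "default_lora_root": "D:/models/loras",
        "default_checkpoint_root": "D:/models/checkpoints",
        "default_embedding_root": "D:/models/embeddings",
        "created_at": "2025-01-01T00:00:00+00:00",
        "updated_at": "2025-01-01T00:00:00+00:00",
    }
}

# Typical calls, assuming a SettingsManager instance named `settings`:
# settings.create_library("workstation", folder_paths=example_library["workstation"]["folder_paths"])
# settings.activate_library("workstation")  # mirrors folder_paths/default_* to the top level
# settings.rename_library("workstation", "studio")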
47  py/services/tag_update_service.py  Normal file
@@ -0,0 +1,47 @@
"""Service for updating tag collections on metadata records."""

from __future__ import annotations

import os

from typing import Awaitable, Callable, Dict, List, Sequence


class TagUpdateService:
    """Encapsulate tag manipulation for models."""

    def __init__(self, *, metadata_manager) -> None:
        self._metadata_manager = metadata_manager

    async def add_tags(
        self,
        *,
        file_path: str,
        new_tags: Sequence[str],
        metadata_loader: Callable[[str], Awaitable[Dict[str, object]]],
        update_cache: Callable[[str, str, Dict[str, object]], Awaitable[bool]],
    ) -> List[str]:
        """Add tags to a metadata entry while keeping case-insensitive uniqueness."""

        base, _ = os.path.splitext(file_path)
        metadata_path = f"{base}.metadata.json"
        metadata = await metadata_loader(metadata_path)

        existing_tags = list(metadata.get("tags", []))
        existing_lower = [tag.lower() for tag in existing_tags]

        tags_added: List[str] = []
        for tag in new_tags:
            if isinstance(tag, str) and tag.strip():
                normalized = tag.strip()
                if normalized.lower() not in existing_lower:
                    existing_tags.append(normalized)
                    existing_lower.append(normalized.lower())
                    tags_added.append(normalized)

        metadata["tags"] = existing_tags
        await self._metadata_manager.save_metadata(file_path, metadata)
        await update_cache(file_path, file_path, metadata)

        return existing_tags
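A minimal sketch of the case-insensitive deduplication, using hypothetical in-memory doubles for the metadata manager and the two callbacks; the import path assumes the plugin package is importable:

import asyncio

from py.services.tag_update_service import TagUpdateService


class StubMetadataManager:
    async def save_metadata(self, file_path, metadata):
        print("saved", file_path, metadata["tags"])


async def load_metadata(metadata_path):
    # Pretend the .metadata.json on disk already holds one tag.
    return {"tags": ["Portrait"]}


async def update_cache(old_path, new_path, metadata):
    return True


async def main() -> None:
    service = TagUpdateService(metadata_manager=StubMetadataManager())
    tags = await service.add_tags(
        file_path="loras/example.safetensors",
        new_tags=["portrait", "  style  ", "Lighting"],  # "portrait" dedupes case-insensitively
        metadata_loader=load_metadata,
        update_cache=update_cache,
    )
    print(tags)  # ['Portrait', 'style', 'Lighting']


asyncio.run(main())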
37  py/services/use_cases/__init__.py  Normal file
@@ -0,0 +1,37 @@
"""Application-level orchestration services for model routes."""

from .auto_organize_use_case import (
    AutoOrganizeInProgressError,
    AutoOrganizeUseCase,
)
from .bulk_metadata_refresh_use_case import (
    BulkMetadataRefreshUseCase,
    MetadataRefreshProgressReporter,
)
from .download_model_use_case import (
    DownloadModelEarlyAccessError,
    DownloadModelUseCase,
    DownloadModelValidationError,
)
from .example_images import (
    DownloadExampleImagesConfigurationError,
    DownloadExampleImagesInProgressError,
    DownloadExampleImagesUseCase,
    ImportExampleImagesUseCase,
    ImportExampleImagesValidationError,
)

__all__ = [
    "AutoOrganizeInProgressError",
    "AutoOrganizeUseCase",
    "BulkMetadataRefreshUseCase",
    "MetadataRefreshProgressReporter",
    "DownloadModelEarlyAccessError",
    "DownloadModelUseCase",
    "DownloadModelValidationError",
    "DownloadExampleImagesConfigurationError",
    "DownloadExampleImagesInProgressError",
    "DownloadExampleImagesUseCase",
    "ImportExampleImagesUseCase",
    "ImportExampleImagesValidationError",
]
56
py/services/use_cases/auto_organize_use_case.py
Normal file
56
py/services/use_cases/auto_organize_use_case.py
Normal file
@@ -0,0 +1,56 @@
"""Auto-organize use case orchestrating concurrency and progress handling."""

from __future__ import annotations

import asyncio
from typing import Optional, Protocol, Sequence

from ..model_file_service import AutoOrganizeResult, ModelFileService, ProgressCallback


class AutoOrganizeLockProvider(Protocol):
    """Minimal protocol for objects exposing auto-organize locking primitives."""

    def is_auto_organize_running(self) -> bool:
        """Return ``True`` when an auto-organize operation is in-flight."""

    async def get_auto_organize_lock(self) -> asyncio.Lock:
        """Return the asyncio lock guarding auto-organize operations."""


class AutoOrganizeInProgressError(RuntimeError):
    """Raised when an auto-organize run is already active."""


class AutoOrganizeUseCase:
    """Coordinate auto-organize execution behind a shared lock."""

    def __init__(
        self,
        *,
        file_service: ModelFileService,
        lock_provider: AutoOrganizeLockProvider,
    ) -> None:
        self._file_service = file_service
        self._lock_provider = lock_provider

    async def execute(
        self,
        *,
        file_paths: Optional[Sequence[str]] = None,
        progress_callback: Optional[ProgressCallback] = None,
    ) -> AutoOrganizeResult:
        """Run the auto-organize routine guarded by a shared lock."""

        if self._lock_provider.is_auto_organize_running():
            raise AutoOrganizeInProgressError("Auto-organize is already running")

        lock = await self._lock_provider.get_auto_organize_lock()
        if lock.locked():
            raise AutoOrganizeInProgressError("Auto-organize is already running")

        async with lock:
            return await self._file_service.auto_organize_models(
                file_paths=list(file_paths) if file_paths is not None else None,
                progress_callback=progress_callback,
            )
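A hypothetical caller, for example in a route handler, might wire the use case as below. The pairing of ws_manager as the lock provider is an assumption based on the WebSocketManager changes later in this diff; all names are illustrative:

# Hypothetical wiring sketch -- service names are illustrative.
use_case = AutoOrganizeUseCase(
    file_service=model_file_service,   # a ModelFileService instance
    lock_provider=ws_manager,          # assumed: exposes the auto-organize lock
)
try:
    result = await use_case.execute(progress_callback=WebSocketProgressCallback())
except AutoOrganizeInProgressError:
    # Surface a "run already in progress" response to the client.
    ...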
122  py/services/use_cases/bulk_metadata_refresh_use_case.py  Normal file
@@ -0,0 +1,122 @@
"""Use case encapsulating the bulk metadata refresh orchestration."""

from __future__ import annotations

import logging
from typing import Any, Dict, Optional, Protocol, Sequence

from ..metadata_sync_service import MetadataSyncService


class MetadataRefreshProgressReporter(Protocol):
    """Protocol for progress reporters used during metadata refresh."""

    async def on_progress(self, payload: Dict[str, Any]) -> None:
        """Handle a metadata refresh progress update."""


class BulkMetadataRefreshUseCase:
    """Coordinate bulk metadata refreshes with progress emission."""

    def __init__(
        self,
        *,
        service,
        metadata_sync: MetadataSyncService,
        settings_service,
        logger: Optional[logging.Logger] = None,
    ) -> None:
        self._service = service
        self._metadata_sync = metadata_sync
        self._settings = settings_service
        self._logger = logger or logging.getLogger(__name__)

    async def execute(
        self,
        *,
        progress_callback: Optional[MetadataRefreshProgressReporter] = None,
    ) -> Dict[str, Any]:
        """Refresh metadata for all qualifying models."""

        cache = await self._service.scanner.get_cached_data()
        total_models = len(cache.raw_data)

        enable_metadata_archive_db = self._settings.get("enable_metadata_archive_db", False)
        to_process: Sequence[Dict[str, Any]] = [
            model
            for model in cache.raw_data
            if model.get("sha256")
            and (not model.get("civitai") or not model["civitai"].get("id"))
            and (
                (enable_metadata_archive_db and not model.get("db_checked", False))
                or (not enable_metadata_archive_db and model.get("from_civitai") is True)
            )
        ]

        total_to_process = len(to_process)
        processed = 0
        success = 0
        needs_resort = False

        async def emit(status: str, **extra: Any) -> None:
            if progress_callback is None:
                return
            payload = {"status": status, "total": total_to_process, "processed": processed, "success": success}
            payload.update(extra)
            await progress_callback.on_progress(payload)

        await emit("started")

        for model in to_process:
            try:
                original_name = model.get("model_name")
                result, _ = await self._metadata_sync.fetch_and_update_model(
                    sha256=model["sha256"],
                    file_path=model["file_path"],
                    model_data=model,
                    update_cache_func=self._service.scanner.update_single_model_cache,
                )
                if result:
                    success += 1
                    if original_name != model.get("model_name"):
                        needs_resort = True
                processed += 1
                await emit(
                    "processing",
                    processed=processed,
                    success=success,
                    current_name=model.get("model_name", "Unknown"),
                )
            except Exception as exc:  # pragma: no cover - logging path
                processed += 1
                self._logger.error(
                    "Error fetching CivitAI data for %s: %s",
                    model.get("file_path"),
                    exc,
                )

        if needs_resort:
            await cache.resort()

        await emit("completed", processed=processed, success=success)

        message = (
            "Successfully updated "
            f"{success} of {processed} processed {self._service.model_type}s (total: {total_models})"
        )

        return {"success": True, "message": message, "processed": processed, "updated": success, "total": total_models}

    async def execute_with_error_handling(
        self,
        *,
        progress_callback: Optional[MetadataRefreshProgressReporter] = None,
    ) -> Dict[str, Any]:
        """Wrapper providing progress notification on unexpected failures."""

        try:
            return await self.execute(progress_callback=progress_callback)
        except Exception as exc:
            if progress_callback is not None:
                await progress_callback.on_progress({"status": "error", "error": str(exc)})
            raise
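Because MetadataRefreshProgressReporter is a typing.Protocol, any object with a matching async on_progress method qualifies without inheriting from it. A minimal illustrative reporter, not part of the diff:

import logging
from typing import Any, Dict

class LoggingProgressReporter:
    """Illustrative reporter; structural typing means no base class is needed."""

    def __init__(self) -> None:
        self._logger = logging.getLogger("metadata_refresh")

    async def on_progress(self, payload: Dict[str, Any]) -> None:
        # Payloads carry status/total/processed/success plus status-specific extras.
        self._logger.info(
            "refresh %s: %s/%s ok=%s",
            payload.get("status"),
            payload.get("processed"),
            payload.get("total"),
            payload.get("success"),
        )

# await use_case.execute_with_error_handling(progress_callback=LoggingProgressReporter())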
37  py/services/use_cases/download_model_use_case.py  Normal file
@@ -0,0 +1,37 @@
"""Use case for scheduling model downloads with consistent error handling."""

from __future__ import annotations

from typing import Any, Dict

from ..download_coordinator import DownloadCoordinator


class DownloadModelValidationError(ValueError):
    """Raised when incoming payload validation fails."""


class DownloadModelEarlyAccessError(RuntimeError):
    """Raised when the download is gated behind Civitai early access."""


class DownloadModelUseCase:
    """Coordinate download scheduling through the coordinator service."""

    def __init__(self, *, download_coordinator: DownloadCoordinator) -> None:
        self._download_coordinator = download_coordinator

    async def execute(self, payload: Dict[str, Any]) -> Dict[str, Any]:
        """Schedule a download and normalize error conditions."""

        try:
            return await self._download_coordinator.schedule_download(payload)
        except ValueError as exc:
            raise DownloadModelValidationError(str(exc)) from exc
        except Exception as exc:  # pragma: no cover - defensive logging path
            message = str(exc)
            if "401" in message:
                raise DownloadModelEarlyAccessError(
                    "Early Access Restriction: This model requires purchase. Please buy early access on Civitai.com."
                ) from exc
            raise
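A hypothetical aiohttp handler showing how the normalized errors could map to HTTP responses; the route shape, app key, and status codes are assumptions, not taken from the diff:

from aiohttp import web

async def download_model_route(request: web.Request) -> web.Response:
    # Assumed: the use case was stashed on the app at startup.
    use_case: DownloadModelUseCase = request.app["download_model_use_case"]
    try:
        result = await use_case.execute(await request.json())
        return web.json_response(result)
    except DownloadModelValidationError as exc:
        return web.json_response({"success": False, "error": str(exc)}, status=400)
    except DownloadModelEarlyAccessError as exc:
        return web.json_response({"success": False, "error": str(exc)}, status=401)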
19  py/services/use_cases/example_images/__init__.py  Normal file
@@ -0,0 +1,19 @@
"""Example image specific use case exports."""

from .download_example_images_use_case import (
    DownloadExampleImagesUseCase,
    DownloadExampleImagesInProgressError,
    DownloadExampleImagesConfigurationError,
)
from .import_example_images_use_case import (
    ImportExampleImagesUseCase,
    ImportExampleImagesValidationError,
)

__all__ = [
    "DownloadExampleImagesUseCase",
    "DownloadExampleImagesInProgressError",
    "DownloadExampleImagesConfigurationError",
    "ImportExampleImagesUseCase",
    "ImportExampleImagesValidationError",
]
@@ -0,0 +1,42 @@
"""Use case coordinating example image downloads."""

from __future__ import annotations

from typing import Any, Dict

from ....utils.example_images_download_manager import (
    DownloadConfigurationError,
    DownloadInProgressError,
    ExampleImagesDownloadError,
)


class DownloadExampleImagesInProgressError(RuntimeError):
    """Raised when a download is already running."""

    def __init__(self, progress: Dict[str, Any]) -> None:
        super().__init__("Download already in progress")
        self.progress = progress


class DownloadExampleImagesConfigurationError(ValueError):
    """Raised when settings prevent downloads from starting."""


class DownloadExampleImagesUseCase:
    """Validate payloads and trigger the download manager."""

    def __init__(self, *, download_manager) -> None:
        self._download_manager = download_manager

    async def execute(self, payload: Dict[str, Any]) -> Dict[str, Any]:
        """Start a download and translate manager errors."""

        try:
            return await self._download_manager.start_download(payload)
        except DownloadInProgressError as exc:
            raise DownloadExampleImagesInProgressError(exc.progress_snapshot) from exc
        except DownloadConfigurationError as exc:
            raise DownloadExampleImagesConfigurationError(str(exc)) from exc
        except ExampleImagesDownloadError:
            raise
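The in-progress error deliberately carries the current progress snapshot, so a caller can return it to the client instead of a bare error. A hedged sketch of consuming that contract; the response shape is illustrative:

# Hypothetical handler fragment: surface the snapshot on conflict.
try:
    result = await download_example_images_use_case.execute(payload)
except DownloadExampleImagesInProgressError as exc:
    result = {"success": False, "in_progress": True, "progress": exc.progress}
except DownloadExampleImagesConfigurationError as exc:
    result = {"success": False, "error": str(exc)}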
@@ -0,0 +1,86 @@
"""Use case for importing example images."""

from __future__ import annotations

import os
import tempfile
from contextlib import suppress
from typing import Any, Dict, List

from aiohttp import web

from ....utils.example_images_processor import (
    ExampleImagesImportError,
    ExampleImagesProcessor,
    ExampleImagesValidationError,
)


class ImportExampleImagesValidationError(ValueError):
    """Raised when request validation fails."""


class ImportExampleImagesUseCase:
    """Parse upload payloads and delegate to the processor service."""

    def __init__(self, *, processor: ExampleImagesProcessor) -> None:
        self._processor = processor

    async def execute(self, request: web.Request) -> Dict[str, Any]:
        model_hash: str | None = None
        files_to_import: List[str] = []
        temp_files: List[str] = []

        try:
            if request.content_type and "multipart/form-data" in request.content_type:
                reader = await request.multipart()

                first_field = await reader.next()
                if first_field and first_field.name == "model_hash":
                    model_hash = await first_field.text()
                else:
                    # Support clients that send files first and hash later
                    if first_field is not None:
                        await self._collect_upload_file(first_field, files_to_import, temp_files)

                async for field in reader:
                    if field.name == "model_hash" and not model_hash:
                        model_hash = await field.text()
                    elif field.name == "files":
                        await self._collect_upload_file(field, files_to_import, temp_files)
            else:
                data = await request.json()
                model_hash = data.get("model_hash")
                files_to_import = list(data.get("file_paths", []))

            result = await self._processor.import_images(model_hash, files_to_import)
            return result
        except ExampleImagesValidationError as exc:
            raise ImportExampleImagesValidationError(str(exc)) from exc
        except ExampleImagesImportError:
            raise
        finally:
            for path in temp_files:
                with suppress(Exception):
                    os.remove(path)

    async def _collect_upload_file(
        self,
        field: Any,
        files_to_import: List[str],
        temp_files: List[str],
    ) -> None:
        """Persist an uploaded file to disk and add it to the import list."""

        filename = field.filename or "upload"
        file_ext = os.path.splitext(filename)[1].lower()

        with tempfile.NamedTemporaryFile(suffix=file_ext, delete=False) as tmp_file:
            temp_files.append(tmp_file.name)
            while True:
                chunk = await field.read_chunk()
                if not chunk:
                    break
                tmp_file.write(chunk)

        files_to_import.append(tmp_file.name)
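An illustrative client-side call matching what this use case parses. The field names mirror the parser above, but the URL and port are assumptions:

import aiohttp

async def upload_examples(model_hash: str, paths: list[str]) -> dict:
    form = aiohttp.FormData()
    form.add_field("model_hash", model_hash)  # parsed first when sent first
    for p in paths:
        form.add_field("files", open(p, "rb"), filename=p)
    async with aiohttp.ClientSession() as session:
        # Endpoint URL is hypothetical; substitute the actual import route.
        async with session.post("http://127.0.0.1:8188/import", data=form) as resp:
            return await resp.json()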
@@ -16,6 +16,8 @@ class WebSocketManager:
         self._download_websockets: Dict[str, web.WebSocketResponse] = {}  # New dict for download-specific clients
         # Add progress tracking dictionary
         self._download_progress: Dict[str, Dict] = {}
+        # Cache last initialization progress payloads
+        self._last_init_progress: Dict[str, Dict] = {}
         # Add auto-organize progress tracking
         self._auto_organize_progress: Optional[Dict] = None
         self._auto_organize_lock = asyncio.Lock()
@@ -39,8 +41,10 @@ class WebSocketManager:
         ws = web.WebSocketResponse()
         await ws.prepare(request)
         self._init_websockets.add(ws)

         try:
+            await self._send_cached_init_progress(ws)
+
             async for msg in ws:
                 if msg.type == web.WSMsgType.ERROR:
                     logger.error(f'Init WebSocket error: {ws.exception()}')
@@ -102,23 +106,53 @@

     async def broadcast_init_progress(self, data: Dict):
         """Broadcast initialization progress to connected clients"""
+        payload = dict(data) if data else {}
+
+        if 'stage' not in payload:
+            payload['stage'] = 'processing'
+        if 'progress' not in payload:
+            payload['progress'] = 0
+        if 'details' not in payload:
+            payload['details'] = 'Processing...'
+
+        key = self._get_init_progress_key(payload)
+        self._last_init_progress[key] = dict(payload)
+
         if not self._init_websockets:
             return

-        # Ensure data has all required fields
-        if 'stage' not in data:
-            data['stage'] = 'processing'
-        if 'progress' not in data:
-            data['progress'] = 0
-        if 'details' not in data:
-            data['details'] = 'Processing...'
-
-        for ws in self._init_websockets:
+        stale_clients = []
+        for ws in list(self._init_websockets):
             try:
-                await ws.send_json(data)
+                await ws.send_json(payload)
             except Exception as e:
                 logger.error(f"Error sending initialization progress: {e}")
+                stale_clients.append(ws)
+
+        for ws in stale_clients:
+            self._init_websockets.discard(ws)
+
+    async def _send_cached_init_progress(self, ws: web.WebSocketResponse) -> None:
+        """Send cached initialization progress payloads to a new client"""
+        if not self._last_init_progress:
+            return
+
+        for payload in list(self._last_init_progress.values()):
+            try:
+                await ws.send_json(payload)
+            except Exception as e:
+                logger.debug(f'Error sending cached initialization progress: {e}')
+
+    def _get_init_progress_key(self, data: Dict) -> str:
+        """Return a stable key for caching initialization progress payloads"""
+        page_type = data.get('pageType')
+        if page_type:
+            return f'page:{page_type}'
+        scanner_type = data.get('scanner_type')
+        if scanner_type:
+            return f'scanner:{scanner_type}'
+        return 'global'

     async def broadcast_download_progress(self, download_id: str, data: Dict):
         """Send progress update to specific download client"""
         # Store simplified progress data in memory (only progress percentage)
@@ -202,4 +236,5 @@
         return str(uuid4())

+
 # Global instance
 ws_manager = WebSocketManager()
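The cache key scheme means the manager retains at most one payload per page or scanner, so a client connecting mid-initialization immediately receives the latest snapshot for each source. A minimal sketch of that behavior, with keys mirroring _get_init_progress_key above:

# Minimal sketch of the caching behavior; payload values are illustrative.
cache: dict = {}

def key_for(data: dict) -> str:
    if data.get('pageType'):
        return f"page:{data['pageType']}"
    if data.get('scanner_type'):
        return f"scanner:{data['scanner_type']}"
    return 'global'

for payload in ({'pageType': 'loras', 'progress': 10},
                {'pageType': 'loras', 'progress': 80},
                {'scanner_type': 'checkpoint', 'progress': 5}):
    cache[key_for(payload)] = payload

# cache now holds only the latest payload per source, which is exactly
# what _send_cached_init_progress replays to a newly connected client:
# {'page:loras': {..., 'progress': 80}, 'scanner:checkpoint': {..., 'progress': 5}}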
@@ -1,11 +1,29 @@
-from typing import Dict, Any
+"""Progress callback implementations backed by the shared WebSocket manager."""
+
+from typing import Any, Dict, Protocol
+
 from .model_file_service import ProgressCallback
 from .websocket_manager import ws_manager


-class WebSocketProgressCallback(ProgressCallback):
-    """WebSocket implementation of progress callback"""
+class ProgressReporter(Protocol):
+    """Protocol representing an async progress callback."""

     async def on_progress(self, progress_data: Dict[str, Any]) -> None:
-        """Send progress data via WebSocket"""
-        await ws_manager.broadcast_auto_organize_progress(progress_data)
+        """Handle a progress update payload."""
+
+
+class WebSocketProgressCallback(ProgressCallback):
+    """WebSocket implementation of progress callback."""
+
+    async def on_progress(self, progress_data: Dict[str, Any]) -> None:
+        """Send progress data via WebSocket."""
+        await ws_manager.broadcast_auto_organize_progress(progress_data)
+
+
+class WebSocketBroadcastCallback:
+    """Generic WebSocket progress callback broadcasting to all clients."""
+
+    async def on_progress(self, progress_data: Dict[str, Any]) -> None:
+        """Send the provided payload to all connected clients."""
+        await ws_manager.broadcast(progress_data)
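The two concrete callbacks differ only in which ws_manager channel they publish to, and either satisfies ProgressReporter structurally. A hypothetical consumer, shown only to illustrate the shape:

# Illustrative wiring: both callbacks satisfy ProgressReporter without inheriting.
organize_progress = WebSocketProgressCallback()    # auto-organize channel only
broadcast_progress = WebSocketBroadcastCallback()  # every connected client

async def demo() -> None:
    await organize_progress.on_progress({"status": "processing", "progress": 50})
    await broadcast_progress.on_progress({"status": "processing", "progress": 50})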
File diff suppressed because it is too large
@@ -4,6 +4,10 @@ import sys
 import subprocess
 from aiohttp import web
 from ..services.settings_manager import settings
+from ..utils.example_images_paths import (
+    get_model_folder,
+    get_model_relative_path,
+)
 from ..utils.constants import SUPPORTED_MEDIA_EXTENSIONS

 logger = logging.getLogger(__name__)
@@ -41,8 +45,12 @@ class ExampleImagesFileManager:
             }, status=400)

         # Construct folder path for this model
-        model_folder = os.path.join(example_images_path, model_hash)
-        model_folder = os.path.abspath(model_folder)  # Get absolute path
+        model_folder = get_model_folder(model_hash)
+        if not model_folder:
+            return web.json_response({
+                'success': False,
+                'error': 'Failed to resolve example images folder for this model.'
+            }, status=500)

         # Path validation: ensure model_folder is under example_images_path
         if not model_folder.startswith(os.path.abspath(example_images_path)):
@@ -109,8 +117,13 @@ class ExampleImagesFileManager:
             }, status=400)

         # Construct folder path for this model
-        model_folder = os.path.join(example_images_path, model_hash)
+        model_folder = get_model_folder(model_hash)
+        if not model_folder:
+            return web.json_response({
+                'success': False,
+                'error': 'Failed to resolve example images folder for this model'
+            }, status=500)

         # Check if folder exists
         if not os.path.exists(model_folder):
             return web.json_response({
@@ -128,9 +141,10 @@ class ExampleImagesFileManager:
                 file_ext = os.path.splitext(file)[1].lower()
                 if (file_ext in SUPPORTED_MEDIA_EXTENSIONS['images'] or
                     file_ext in SUPPORTED_MEDIA_EXTENSIONS['videos']):
+                    relative_path = get_model_relative_path(model_hash)
                     files.append({
                         'name': file,
-                        'path': f'/example_images_static/{model_hash}/{file}',
+                        'path': f'/example_images_static/{relative_path}/{file}',
                         'extension': file_ext,
                         'is_video': file_ext in SUPPORTED_MEDIA_EXTENSIONS['videos']
                     })
@@ -176,8 +190,13 @@ class ExampleImagesFileManager:
             })

         # Construct folder path for this model
-        model_folder = os.path.join(example_images_path, model_hash)
+        model_folder = get_model_folder(model_hash)
+        if not model_folder:
+            return web.json_response({
+                'has_images': False,
+                'error': 'Failed to resolve example images folder for this model'
+            })

         # Check if folder exists
         if not os.path.exists(model_folder) or not os.path.isdir(model_folder):
             return web.json_response({
@@ -1,19 +1,39 @@
 import logging
 import os
 import re
-from ..utils.metadata_manager import MetadataManager
-from ..utils.routes_common import ModelRouteUtils
+from ..recipes.constants import GEN_PARAM_KEYS
+from ..services.metadata_service import get_default_metadata_provider, get_metadata_provider
+from ..services.metadata_sync_service import MetadataSyncService
+from ..services.preview_asset_service import PreviewAssetService
+from ..services.settings_manager import settings
+from ..services.downloader import get_downloader
 from ..utils.constants import SUPPORTED_MEDIA_EXTENSIONS
 from ..utils.exif_utils import ExifUtils
-from ..recipes.constants import GEN_PARAM_KEYS
+from ..utils.metadata_manager import MetadataManager

 logger = logging.getLogger(__name__)

+_preview_service = PreviewAssetService(
+    metadata_manager=MetadataManager,
+    downloader_factory=get_downloader,
+    exif_utils=ExifUtils,
+)
+
+_metadata_sync_service = MetadataSyncService(
+    metadata_manager=MetadataManager,
+    preview_service=_preview_service,
+    settings=settings,
+    default_metadata_provider_factory=get_default_metadata_provider,
+    metadata_provider_selector=get_metadata_provider,
+)
+
+
 class MetadataUpdater:
     """Handles updating model metadata related to example images"""

     @staticmethod
-    async def refresh_model_metadata(model_hash, model_name, scanner_type, scanner):
+    async def refresh_model_metadata(model_hash, model_name, scanner_type, scanner, progress: dict | None = None):
         """Refresh model metadata from CivitAI

         Args:
@@ -25,8 +45,6 @@ class MetadataUpdater:
         Returns:
             bool: True if metadata was successfully refreshed, False otherwise
         """
-        from ..utils.example_images_download_manager import download_progress
-
         try:
             # Find the model in the scanner cache
             cache = await scanner.get_cached_data()
@@ -47,50 +65,65 @@ class MetadataUpdater:
                 return False

             # Track that we're refreshing this model
-            download_progress['refreshed_models'].add(model_hash)
+            if progress is not None:
+                progress['refreshed_models'].add(model_hash)

-            # Use ModelRouteUtils to refresh metadata
             async def update_cache_func(old_path, new_path, metadata):
                 return await scanner.update_single_model_cache(old_path, new_path, metadata)

-            success = await ModelRouteUtils.fetch_and_update_model(
-                model_hash,
-                file_path,
-                model_data,
-                update_cache_func
-            )
+            success, error = await _metadata_sync_service.fetch_and_update_model(
+                sha256=model_hash,
+                file_path=file_path,
+                model_data=model_data,
+                update_cache_func=update_cache_func,
+            )

             if success:
                 logger.info(f"Successfully refreshed metadata for {model_name}")
                 return True
             else:
-                logger.warning(f"Failed to refresh metadata for {model_name}")
+                logger.warning(f"Failed to refresh metadata for {model_name}, {error}")
                 return False

         except Exception as e:
             error_msg = f"Error refreshing metadata for {model_name}: {str(e)}"
             logger.error(error_msg, exc_info=True)
-            download_progress['errors'].append(error_msg)
-            download_progress['last_error'] = error_msg
+            if progress is not None:
+                progress['errors'].append(error_msg)
+                progress['last_error'] = error_msg
             return False

     @staticmethod
     async def get_updated_model(model_hash, scanner):
-        """Get updated model data
-
-        Args:
-            model_hash: SHA256 hash of the model
-            scanner: Scanner instance
-
-        Returns:
-            dict: Updated model data or None if not found
-        """
+        """Load the most recent metadata for a model identified by hash."""
         cache = await scanner.get_cached_data()
+        target = None
         for item in cache.raw_data:
             if item.get('sha256') == model_hash:
-                return item
-        return None
+                target = item
+                break
+
+        if not target:
+            return None
+
+        file_path = target.get('file_path')
+        if not file_path:
+            return target
+
+        model_cls = getattr(scanner, 'model_class', None)
+        if model_cls is None:
+            metadata, should_skip = await MetadataManager.load_metadata(file_path)
+        else:
+            metadata, should_skip = await MetadataManager.load_metadata(file_path, model_cls)
+
+        if should_skip or metadata is None:
+            return target
+
+        rich_metadata = metadata.to_dict()
+        rich_metadata.setdefault('folder', target.get('folder', ''))
+        return rich_metadata

     @staticmethod
     async def update_metadata_from_local_examples(model_hash, model, scanner_type, scanner, model_dir):
         """Update model metadata with local example image information
@@ -5,6 +5,7 @@ import re
 import json
 from ..services.settings_manager import settings
 from ..services.service_registry import ServiceRegistry
+from ..utils.example_images_paths import iter_library_roots
 from ..utils.metadata_manager import MetadataManager
 from ..utils.example_images_processor import ExampleImagesProcessor
 from ..utils.constants import SUPPORTED_MEDIA_EXTENSIONS
@@ -19,29 +20,35 @@ class ExampleImagesMigration:
     @staticmethod
     async def check_and_run_migrations():
         """Check if migrations are needed and run them in background"""
-        example_images_path = settings.get('example_images_path')
-        if not example_images_path or not os.path.exists(example_images_path):
+        root = settings.get('example_images_path')
+        if not root or not os.path.exists(root):
             logger.debug("No example images path configured or path doesn't exist, skipping migrations")
             return

-        # Check current version from progress file
-        current_version = 0
-        progress_file = os.path.join(example_images_path, '.download_progress.json')
-        if os.path.exists(progress_file):
-            try:
-                with open(progress_file, 'r', encoding='utf-8') as f:
-                    progress_data = json.load(f)
-                    current_version = progress_data.get('naming_version', 0)
-            except Exception as e:
-                logger.error(f"Failed to load progress file for migration check: {e}")
-
-        # If current version is less than target version, start migration
-        if current_version < CURRENT_NAMING_VERSION:
-            logger.info(f"Starting example images naming migration from v{current_version} to v{CURRENT_NAMING_VERSION}")
-            # Start migration in background task
-            asyncio.create_task(
-                ExampleImagesMigration.run_migrations(example_images_path, current_version, CURRENT_NAMING_VERSION)
-            )
+        for library_name, library_path in iter_library_roots():
+            if not library_path or not os.path.exists(library_path):
+                continue
+
+            current_version = 0
+            progress_file = os.path.join(library_path, '.download_progress.json')
+            if os.path.exists(progress_file):
+                try:
+                    with open(progress_file, 'r', encoding='utf-8') as f:
+                        progress_data = json.load(f)
+                        current_version = progress_data.get('naming_version', 0)
+                except Exception as e:
+                    logger.error(f"Failed to load progress file for migration check: {e}")
+
+            if current_version < CURRENT_NAMING_VERSION:
+                logger.info(
+                    "Starting example images naming migration from v%s to v%s for library '%s'",
+                    current_version,
+                    CURRENT_NAMING_VERSION,
+                    library_name,
+                )
+                asyncio.create_task(
+                    ExampleImagesMigration.run_migrations(library_path, current_version, CURRENT_NAMING_VERSION)
+                )

     @staticmethod
     async def run_migrations(example_images_path, from_version, to_version):
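Each library root now keeps its own .download_progress.json marker, and the trigger compares its naming_version field against CURRENT_NAMING_VERSION. A distilled sketch of that check in isolation; the path is made up, and only the naming_version field is assumed to matter here:

import json, os

library_path = "/data/examples/main"  # illustrative
progress_file = os.path.join(library_path, ".download_progress.json")

current_version = 0
if os.path.exists(progress_file):
    with open(progress_file, "r", encoding="utf-8") as f:
        current_version = json.load(f).get("naming_version", 0)

# A migration task is scheduled only when current_version < CURRENT_NAMING_VERSION.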
221  py/utils/example_images_paths.py  Normal file
@@ -0,0 +1,221 @@
"""Utility helpers for resolving example image storage paths."""

from __future__ import annotations

import logging
import os
import re
import shutil
from typing import Iterable, List, Optional, Tuple

from ..services.settings_manager import settings

_HEX_PATTERN = re.compile(r"[a-fA-F0-9]{64}")

logger = logging.getLogger(__name__)


def _get_configured_libraries() -> List[str]:
    """Return configured library names if multi-library support is enabled."""

    libraries = settings.get("libraries")
    if isinstance(libraries, dict) and libraries:
        return list(libraries.keys())
    return []


def get_example_images_root() -> str:
    """Return the root directory configured for example images."""

    root = settings.get("example_images_path") or ""
    return os.path.abspath(root) if root else ""


def uses_library_scoped_folders() -> bool:
    """Return True when example images should be separated per library."""

    libraries = _get_configured_libraries()
    return len(libraries) > 1


def sanitize_library_name(library_name: Optional[str]) -> str:
    """Return a filesystem safe library name."""

    name = library_name or settings.get_active_library_name() or "default"
    safe_name = re.sub(r"[^A-Za-z0-9_.-]", "_", name)
    return safe_name or "default"


def get_library_root(library_name: Optional[str] = None) -> str:
    """Return the directory where a library's example images should live."""

    root = get_example_images_root()
    if not root:
        return ""

    if uses_library_scoped_folders():
        return os.path.join(root, sanitize_library_name(library_name))
    return root


def ensure_library_root_exists(library_name: Optional[str] = None) -> str:
    """Ensure the example image directory for a library exists and return it."""

    library_root = get_library_root(library_name)
    if library_root:
        os.makedirs(library_root, exist_ok=True)
    return library_root


def get_model_folder(model_hash: str, library_name: Optional[str] = None) -> str:
    """Return the folder path for a model's example images."""

    if not model_hash:
        return ""

    library_root = ensure_library_root_exists(library_name)
    if not library_root:
        return ""

    normalized_hash = (model_hash or "").lower()
    resolved_folder = os.path.join(library_root, normalized_hash)

    if uses_library_scoped_folders():
        legacy_root = get_example_images_root()
        legacy_folder = os.path.join(legacy_root, normalized_hash)
        if os.path.exists(legacy_folder) and not os.path.exists(resolved_folder):
            try:
                os.makedirs(library_root, exist_ok=True)
                shutil.move(legacy_folder, resolved_folder)
                logger.info(
                    "Migrated legacy example images folder '%s' to '%s'", legacy_folder, resolved_folder
                )
            except OSError as exc:
                logger.error(
                    "Failed to migrate example images from '%s' to '%s': %s",
                    legacy_folder,
                    resolved_folder,
                    exc,
                )
                return legacy_folder

    return resolved_folder


class ExampleImagePathResolver:
    """Convenience wrapper exposing example image path helpers."""

    @staticmethod
    def get_model_folder(model_hash: str, library_name: Optional[str] = None) -> str:
        """Return the example image folder for a model, migrating legacy paths."""

        return get_model_folder(model_hash, library_name)

    @staticmethod
    def get_library_root(library_name: Optional[str] = None) -> str:
        """Return the configured library root for example images."""

        return get_library_root(library_name)

    @staticmethod
    def ensure_library_root_exists(library_name: Optional[str] = None) -> str:
        """Ensure the library root exists before writing files."""

        return ensure_library_root_exists(library_name)

    @staticmethod
    def get_model_relative_path(model_hash: str, library_name: Optional[str] = None) -> str:
        """Return the relative path to a model folder from the static mount point."""

        return get_model_relative_path(model_hash, library_name)


def get_model_relative_path(model_hash: str, library_name: Optional[str] = None) -> str:
    """Return the relative URL path from the static mount to a model folder."""

    root = get_example_images_root()
    folder = get_model_folder(model_hash, library_name)
    if not root or not folder:
        return ""

    try:
        relative = os.path.relpath(folder, root)
    except ValueError:
        return ""

    return relative.replace("\\", "/")


def iter_library_roots() -> Iterable[Tuple[str, str]]:
    """Yield configured library names and their resolved filesystem roots."""

    root = get_example_images_root()
    if not root:
        return []

    libraries = _get_configured_libraries()
    if uses_library_scoped_folders():
        results: List[Tuple[str, str]] = []
        if libraries:
            for library in libraries:
                results.append((library, get_library_root(library)))
        else:
            # Fall back to the active library to avoid skipping migrations/cleanup
            active = settings.get_active_library_name() or "default"
            results.append((active, get_library_root(active)))
        return results

    active = settings.get_active_library_name() or "default"
    return [(active, root)]


def is_hash_folder(name: str) -> bool:
    """Return True if the provided name looks like a model hash folder."""

    return bool(_HEX_PATTERN.fullmatch(name or ""))


def is_valid_example_images_root(folder_path: str) -> bool:
    """Check whether a folder looks like a dedicated example images root."""

    try:
        items = os.listdir(folder_path)
    except OSError:
        return False

    for item in items:
        item_path = os.path.join(folder_path, item)
        if item == ".download_progress.json" and os.path.isfile(item_path):
            continue

        if os.path.isdir(item_path):
            if is_hash_folder(item):
                continue
            if item == "_deleted":
                # Allow cleanup staging folders
                continue
            # When multi-library mode is active we expect nested hash folders
            if uses_library_scoped_folders():
                if _library_folder_has_only_hash_dirs(item_path):
                    continue
        return False

    return True


def _library_folder_has_only_hash_dirs(path: str) -> bool:
    """Return True when a library subfolder only contains hash folders or metadata files."""

    try:
        for entry in os.listdir(path):
            entry_path = os.path.join(path, entry)
            if entry == ".download_progress.json" and os.path.isfile(entry_path):
                continue
            if entry == "_deleted" and os.path.isdir(entry_path):
                continue
            if not os.path.isdir(entry_path) or not is_hash_folder(entry):
                return False
    except OSError:
        return False

    return True
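To make the two layouts concrete, a hedged sketch of how these helpers resolve paths; the root, library names, and hash are illustrative:

# Illustrative only -- paths, names, and hashes are made up.
# Single library configured:
#   get_library_root()                      -> "/data/examples"
#   get_model_folder("ab12..ef")            -> "/data/examples/ab12..ef"
#   get_model_relative_path("ab12..ef")     -> "ab12..ef"
#
# Two libraries ("main", "extra") configured:
#   get_library_root("extra")                   -> "/data/examples/extra"
#   get_model_folder("ab12..ef", "extra")       -> "/data/examples/extra/ab12..ef"
#   get_model_relative_path("ab12..ef", "extra") -> "extra/ab12..ef"
#
# In the multi-library case get_model_folder also moves a pre-existing
# "/data/examples/ab12..ef" into the library folder the first time it is
# asked for it, which is why callers treat an empty return as a hard error.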
@@ -1,18 +1,26 @@
 import logging
 import os
 import re
-import tempfile
 import random
 import string
 from aiohttp import web
 from ..utils.constants import SUPPORTED_MEDIA_EXTENSIONS
 from ..services.service_registry import ServiceRegistry
 from ..services.settings_manager import settings
+from ..utils.example_images_paths import get_model_folder, get_model_relative_path
 from .example_images_metadata import MetadataUpdater
 from ..utils.metadata_manager import MetadataManager

 logger = logging.getLogger(__name__)


+class ExampleImagesImportError(RuntimeError):
+    """Base error for example image import operations."""
+
+
+class ExampleImagesValidationError(ExampleImagesImportError):
+    """Raised when input validation fails."""
+
+
 class ExampleImagesProcessor:
     """Processes and manipulates example images"""
@@ -299,90 +307,29 @@ class ExampleImagesProcessor:
         return False

     @staticmethod
-    async def import_images(request):
-        """
-        Import local example images
-
-        Accepts:
-        - multipart/form-data form with model_hash and files fields
-        or
-        - JSON request with model_hash and file_paths
-
-        Returns:
-        - Success status and list of imported files
-        """
+    async def import_images(model_hash: str, files_to_import: list[str]):
+        """Import local example images for a model."""
+        if not model_hash:
+            raise ExampleImagesValidationError('Missing model_hash parameter')
+
+        if not files_to_import:
+            raise ExampleImagesValidationError('No files provided to import')
+
         try:
-            model_hash = None
-            files_to_import = []
-            temp_files_to_cleanup = []
-
-            # Check if it's a multipart form-data request (direct file upload)
-            if request.content_type and 'multipart/form-data' in request.content_type:
-                reader = await request.multipart()
-
-                # First get model_hash
-                field = await reader.next()
-                if field.name == 'model_hash':
-                    model_hash = await field.text()
-
-                # Then process all files
-                while True:
-                    field = await reader.next()
-                    if field is None:
-                        break
-
-                    if field.name == 'files':
-                        # Create a temporary file with appropriate suffix for type detection
-                        file_name = field.filename
-                        file_ext = os.path.splitext(file_name)[1].lower()
-
-                        with tempfile.NamedTemporaryFile(suffix=file_ext, delete=False) as tmp_file:
-                            temp_path = tmp_file.name
-                            temp_files_to_cleanup.append(temp_path)  # Track for cleanup
-
-                            # Write chunks to the temporary file
-                            while True:
-                                chunk = await field.read_chunk()
-                                if not chunk:
-                                    break
-                                tmp_file.write(chunk)
-
-                            # Add to the list of files to process
-                            files_to_import.append(temp_path)
-            else:
-                # Parse JSON request (legacy method using file paths)
-                data = await request.json()
-                model_hash = data.get('model_hash')
-                files_to_import = data.get('file_paths', [])
-
-            if not model_hash:
-                return web.json_response({
-                    'success': False,
-                    'error': 'Missing model_hash parameter'
-                }, status=400)
-
-            if not files_to_import:
-                return web.json_response({
-                    'success': False,
-                    'error': 'No files provided to import'
-                }, status=400)
-
             # Get example images path
             example_images_path = settings.get('example_images_path')
             if not example_images_path:
-                return web.json_response({
-                    'success': False,
-                    'error': 'No example images path configured'
-                }, status=400)
+                raise ExampleImagesValidationError('No example images path configured')

             # Find the model and get current metadata
             lora_scanner = await ServiceRegistry.get_lora_scanner()
             checkpoint_scanner = await ServiceRegistry.get_checkpoint_scanner()
             embedding_scanner = await ServiceRegistry.get_embedding_scanner()

             model_data = None
             scanner = None

             # Check both scanners to find the model
             for scan_obj in [lora_scanner, checkpoint_scanner, embedding_scanner]:
                 cache = await scan_obj.get_cached_data()
@@ -393,21 +340,22 @@ class ExampleImagesProcessor:
                     break
             if model_data:
                 break

             if not model_data:
-                return web.json_response({
-                    'success': False,
-                    'error': f"Model with hash {model_hash} not found in cache"
-                }, status=404)
+                raise ExampleImagesImportError(
+                    f"Model with hash {model_hash} not found in cache"
+                )

             # Create model folder
-            model_folder = os.path.join(example_images_path, model_hash)
+            model_folder = get_model_folder(model_hash)
+            if not model_folder:
+                raise ExampleImagesImportError('Failed to resolve model folder for example images')
             os.makedirs(model_folder, exist_ok=True)

             imported_files = []
             errors = []
             newly_imported_paths = []

             # Process each file path
             for file_path in files_to_import:
                 try:
@@ -415,68 +363,60 @@ class ExampleImagesProcessor:
                     if not os.path.isfile(file_path):
                         errors.append(f"File not found: {file_path}")
                         continue

                     # Check if file type is supported
                     file_ext = os.path.splitext(file_path)[1].lower()
                     if not (file_ext in SUPPORTED_MEDIA_EXTENSIONS['images'] or
                             file_ext in SUPPORTED_MEDIA_EXTENSIONS['videos']):
                         errors.append(f"Unsupported file type: {file_path}")
                         continue

                     # Generate new filename using short ID instead of UUID
                     short_id = ExampleImagesProcessor.generate_short_id()
                     new_filename = f"custom_{short_id}{file_ext}"

                     dest_path = os.path.join(model_folder, new_filename)

                     # Copy the file
                     import shutil
                     shutil.copy2(file_path, dest_path)
                     # Store both the dest_path and the short_id
                     newly_imported_paths.append((dest_path, short_id))

                     # Add to imported files list
                     imported_files.append({
                         'name': new_filename,
-                        'path': f'/example_images_static/{model_hash}/{new_filename}',
+                        'path': f'/example_images_static/{get_model_relative_path(model_hash)}/{new_filename}',
                         'extension': file_ext,
                         'is_video': file_ext in SUPPORTED_MEDIA_EXTENSIONS['videos']
                     })
                 except Exception as e:
                     errors.append(f"Error importing {file_path}: {str(e)}")

             # Update metadata with new example images
             regular_images, custom_images = await MetadataUpdater.update_metadata_after_import(
                 model_hash,
                 model_data,
                 scanner,
                 newly_imported_paths
             )

-            return web.json_response({
+            return {
                 'success': len(imported_files) > 0,
                 'message': f'Successfully imported {len(imported_files)} files' +
                            (f' with {len(errors)} errors' if errors else ''),
                 'files': imported_files,
                 'errors': errors,
                 'regular_images': regular_images,
                 'custom_images': custom_images,
                 "model_file_path": model_data.get('file_path', ''),
-            })
+            }

+        except ExampleImagesImportError:
+            raise
         except Exception as e:
             logger.error(f"Failed to import example images: {e}", exc_info=True)
-            return web.json_response({
-                'success': False,
-                'error': str(e)
-            }, status=500)
-        finally:
-            # Clean up temporary files
-            for temp_file in temp_files_to_cleanup:
-                try:
-                    os.remove(temp_file)
-                except Exception as e:
-                    logger.error(f"Failed to remove temporary file {temp_file}: {e}")
+            raise ExampleImagesImportError(str(e)) from e

     @staticmethod
     async def delete_custom_image(request):
@@ -560,7 +500,12 @@ class ExampleImagesProcessor:
             }, status=404)

         # Find and delete the actual file
-        model_folder = os.path.join(example_images_path, model_hash)
+        model_folder = get_model_folder(model_hash)
+        if not model_folder:
+            return web.json_response({
+                'success': False,
+                'error': 'Failed to resolve model folder for example images'
+            }, status=500)
         file_deleted = False

         if os.path.exists(model_folder):
@@ -4,7 +4,7 @@ import logging
 from typing import Optional
 from io import BytesIO
 import os
-from PIL import Image
+from PIL import Image, PngImagePlugin

 logger = logging.getLogger(__name__)

@@ -86,9 +86,10 @@ class ExifUtils:

         # For PNG, try to update parameters directly
         if img_format == 'PNG':
-            # We'll save with parameters in the PNG info
-            info_dict = {'parameters': metadata}
-            img.save(image_path, format='PNG', pnginfo=info_dict)
+            # Use PngInfo instead of plain dictionary
+            png_info = PngImagePlugin.PngInfo()
+            png_info.add_text("parameters", metadata)
+            img.save(image_path, format='PNG', pnginfo=png_info)
             return image_path

         # For WebP format, use PIL's exif parameter directly
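This fix matters because Pillow's PNG writer iterates the chunks of a PngInfo object; a plain dictionary does not carry text chunks into the saved file. A minimal standalone sketch of writing and reading back a "parameters" text chunk; the file name and prompt string are illustrative:

from PIL import Image, PngImagePlugin

img = Image.new("RGB", (8, 8))
png_info = PngImagePlugin.PngInfo()
png_info.add_text("parameters", "a prompt, steps: 20, seed: 42")
img.save("example.png", format="PNG", pnginfo=png_info)

# Text chunks come back through Image.info on reload.
reloaded = Image.open("example.png")
print(reloaded.info.get("parameters"))  # -> "a prompt, steps: 20, seed: 42"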
@@ -24,6 +24,8 @@ class BaseModelMetadata:
     civitai_deleted: bool = False  # Whether deleted from Civitai
     favorite: bool = False  # Whether the model is a favorite
    exclude: bool = False  # Whether to exclude this model from the cache
+    db_checked: bool = False  # Whether checked in archive DB
+    last_checked_at: float = 0  # Last checked timestamp
     _unknown_fields: Dict[str, Any] = field(default_factory=dict, repr=False, compare=False)  # Store unknown fields

     def __post_init__(self):
File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff