Compare commits

...

228 Commits

Author SHA1 Message Date
Will Miao
9199950b74 feat(release): update version to 0.9.6 and add release notes for critical performance optimizations and new features 2025-10-07 17:41:58 +08:00
Will Miao
4c7e31687b feat(recipe_scanner): reset cached state on active library change 2025-10-07 08:07:44 +08:00
Will Miao
75e207b520 fix(banner): remove redundant community support banner registration 2025-10-06 22:39:49 +08:00
Will Miao
631289b75e feat(banner): add community support banner with Ko-fi integration and translations 2025-10-06 22:39:21 +08:00
Will Miao
1b958d0a5d feat(modal): use CSS variables for header height and improve recipe modal layout 2025-10-06 16:28:56 +08:00
Will Miao
35fdf9020d docs(README): update instructions for settings.json file creation in Portable Edition 2025-10-06 15:32:37 +08:00
Will Miao
45926b1dca feat(constants): add CHROMA model to BASE_MODELS and update categories 2025-10-06 08:48:15 +08:00
Will Miao
686ba5024d fix(tests): add loadingManager mocks in settingsManager tests 2025-10-06 08:24:58 +08:00
pixelpaws
cf375c7c86 Merge pull request #537 from willmiao/codex/implement-video-lazy-loading-with-queue
fix: throttle model card video lazy loading
2025-10-06 08:07:24 +08:00
pixelpaws
5e53d76f44 fix(model-card): throttle preview video loading 2025-10-06 07:45:51 +08:00
Will Miao
7757f72859 Enhance test for saving paths to ensure cross-platform compatibility in folder paths 2025-10-05 22:40:43 +08:00
pixelpaws
c8cc584049 Merge pull request #536 from willmiao/codex/add-unit-tests-for-config-saving-paths
Add regression tests for Config save path handling
2025-10-05 22:24:57 +08:00
pixelpaws
2cdd269bba Merge pull request #535 from willmiao/codex/add-tests-for-migration-utility-functions
test: cover example images migration flows
2025-10-05 22:24:42 +08:00
pixelpaws
d2d97ae5bb Merge pull request #534 from willmiao/codex/add-tests-for-cache-middleware
test: add cache control middleware coverage
2025-10-05 22:24:26 +08:00
pixelpaws
d08d77c555 Merge pull request #533 from willmiao/codex/add-async-test-module-for-lifecycle
test: add LoRA manager lifecycle coverage
2025-10-05 22:24:12 +08:00
pixelpaws
92f8d2139a Merge pull request #532 from willmiao/codex/create-tests-for-statsroutes
test: add coverage for stats routes endpoints
2025-10-05 22:23:59 +08:00
pixelpaws
50f2c2dfe6 test(config): cover save path migration flows 2025-10-05 22:19:36 +08:00
pixelpaws
3539c453d3 test(utils): cover example images migrations 2025-10-05 22:19:29 +08:00
pixelpaws
1631122f95 test(middleware): add cache control coverage 2025-10-05 22:19:20 +08:00
pixelpaws
8fcb979544 test(routes): cover lora manager lifecycle 2025-10-05 22:19:10 +08:00
pixelpaws
8a5af0b7f3 test(routes): add stats routes coverage 2025-10-05 22:18:59 +08:00
Will Miao
cb1f08d556 Fix low contrast on nav-item hover 2025-10-05 21:10:01 +08:00
pixelpaws
1150267765 Merge pull request #531 from willmiao/codex/-activelibrary
fix(locales): translate library selection strings
2025-10-05 21:04:10 +08:00
pixelpaws
5c1252548d fix(locales): translate library selection strings 2025-10-05 21:03:21 +08:00
Will Miao
3c7cdf5db8 feat(header): enhance navigation and search functionality with context-aware behavior 2025-10-05 20:58:14 +08:00
Will Miao
9ac4203b1c test(example-images): allow monkeypatching os.startfile on linux 2025-10-05 16:30:44 +08:00
pixelpaws
d0800510db Merge pull request #529 from willmiao/codex/update-file-url-formatting-methods
fix: route recipe previews through preview API
2025-10-05 15:53:26 +08:00
pixelpaws
f8ba551cc4 fix(recipes): use preview endpoint for recipe images 2025-10-05 15:49:18 +08:00
Will Miao
413444500e refactor(model_scanner): normalize path comparisons for model roots
fix(example_images_download_manager): re-raise specific exception on download error

refactor(usage_stats): define constants locally to avoid conditional imports

test(example_images_download_manager): update exception handling in download tests

test(example_images_file_manager): differentiate between os.startfile and subprocess.Popen in tests

test(example_images_paths): ensure valid example images root with single-library mode

test(usage_stats): use string literals for metadata payload to avoid conditional imports
2025-10-05 15:48:50 +08:00
pixelpaws
e21d5835ec Merge pull request #524 from willmiao/codex/add-async-tests-for-websocketmanager
Add websocket broadcast and usage stats tests
2025-10-05 15:19:40 +08:00
pixelpaws
f2f354e478 Merge pull request #528 from willmiao/codex/add-bulk-action-set-content-rating, fixes #428
Add bulk content rating update action
2025-10-05 15:19:19 +08:00
pixelpaws
b195d4569c test(usage-stats): allow metadata registry monkeypatch 2025-10-05 15:13:20 +08:00
pixelpaws
3b77fed72d feat(bulk): add bulk content rating action 2025-10-05 15:09:43 +08:00
pixelpaws
fc64e97f92 Merge pull request #525 from willmiao/codex/develop-tests-for-metadatasyncservice-and-modelscanner
Add coverage for metadata sync service and scanner reconciliation
2025-10-05 15:03:36 +08:00
pixelpaws
1da0434454 Merge pull request #526 from willmiao/codex/add-tests-for-utility-functions
test: add coverage for utility helpers
2025-10-05 15:03:20 +08:00
pixelpaws
cf2fe40612 Merge pull request #527 from willmiao/codex/add-unit-tests-for-metadata-components
Add metadata collector unit tests and fixtures
2025-10-05 15:03:04 +08:00
pixelpaws
8f46433ff7 Merge pull request #523 from willmiao/codex/add-tests-for-settingsmanager-migration
Add tests for settings migrations and service registry lazy loading
2025-10-05 15:02:44 +08:00
pixelpaws
f3be3ae269 Merge pull request #522 from willmiao/codex/add-tests-for-example-images-pipeline
test: add example images route and utility coverage
2025-10-05 15:02:07 +08:00
pixelpaws
cfec5447d3 test(metadata): add collector coverage 2025-10-05 14:44:17 +08:00
pixelpaws
2d36b461cf test(utils): add coverage for helper utilities 2025-10-05 14:43:21 +08:00
pixelpaws
5e23e4b13d test(metadata): cover sync service and scanner reconciliation 2025-10-05 14:42:13 +08:00
pixelpaws
badae2e8b3 test: cover websocket broadcasts and usage stats 2025-10-05 14:41:47 +08:00
pixelpaws
9e64531de6 test(settings): cover migrations and registry lazy loading 2025-10-05 14:41:24 +08:00
pixelpaws
fdec8d283c test(example-images): expand coverage for routes and utilities 2025-10-05 14:40:48 +08:00
Will Miao
9abedbf7cb fix(metadata-sync): improve error handling for deleted CivitAI models, fixes #497 2025-10-05 11:05:52 +08:00
pixelpaws
66004c1cdc Merge pull request #520 from willmiao/codex/adjust-example-images-download-to-use-library-name
fix: keep example image downloads in initial library
2025-10-05 10:08:26 +08:00
pixelpaws
5b564cd8a3 fix(example-images): pin downloads to start library 2025-10-05 09:10:25 +08:00
pixelpaws
2e79970e6e Merge pull request #519 from willmiao/codex/update-example-images-download-flow
fix: reuse migrated example image folders before download
2025-10-05 09:04:06 +08:00
pixelpaws
67c82ba6ea fix(example-images): reuse migrated folders during downloads 2025-10-05 08:37:11 +08:00
Will Miao
98425f37b8 fix(download-manager): improve handling of civitai payloads to avoid empty dictionaries 2025-10-05 07:29:11 +08:00
Will Miao
9d22dd3465 fix(model-library): update response structure to return model versions directly 2025-10-04 22:06:33 +08:00
pixelpaws
837138db49 Merge pull request #517 from willmiao/codex/determine-example_images_path-structure
feat: namespace example image storage by library
2025-10-04 20:42:15 +08:00
pixelpaws
d43d992362 feat(example-images): namespace storage by library 2025-10-04 20:29:49 +08:00
Will Miao
16b611cb7e Simplify settings file location and configuration 2025-10-04 18:42:53 +08:00
Will Miao
8dde2d5e0d feat(websocket-manager): implement caching for initialization progress and enhance broadcast functionality 2025-10-04 18:19:20 +08:00
Will Miao
22b0b2bd24 fix(model-card): correct query parameter handling in versioned preview URL 2025-10-04 17:32:16 +08:00
Will Miao
056f727bfd feat(model-scanner): enhance page type determination for model types 2025-10-04 17:08:02 +08:00
Will Miao
0aa6c53c1f feat(initialization): add support for embeddings page type and log progress updates 2025-10-04 14:11:27 +08:00
pixelpaws
d9b0660611 Merge pull request #516 from willmiao/codex/investigate-library-switching-functionality-issue
feat: serve dynamic preview assets after library switch
2025-10-04 10:51:52 +08:00
pixelpaws
d01666f4e2 feat(previews): serve dynamic library previews 2025-10-04 10:38:06 +08:00
Will Miao
51bee87cd0 fix(persistence): improve handling of lora_info attributes in recipe persistence 2025-10-04 09:52:53 +08:00
pixelpaws
3041b443e5 Merge pull request #515 from willmiao/codex/add-library-selection-in-settings-modal
Add tests for settings library switcher
2025-10-04 09:22:11 +08:00
pixelpaws
d95e6c939b test(settings): cover library switch workflow 2025-10-04 09:07:02 +08:00
pixelpaws
fd38c63b35 Merge pull request #514 from willmiao/codex/add-multi-library-endpoints-for-frontend
test: add coverage for settings library endpoints
2025-10-04 08:43:03 +08:00
pixelpaws
b69c24ae14 test(routes): cover settings library endpoints 2025-10-04 08:38:59 +08:00
pixelpaws
65a0c00e33 Merge pull request #513 from willmiao/codex/add-link-button-to-settings-modal-header
Adjust settings modal location shortcut styling
2025-10-04 07:57:43 +08:00
pixelpaws
b12a5ef133 style(settings): restyle settings location shortcut 2025-10-04 07:52:10 +08:00
pixelpaws
9e1b92c26e Merge pull request #512 from willmiao/codex/analyze-and-add-tests-for-misc-routes
test: improve misc routes coverage
2025-10-04 05:25:58 +08:00
pixelpaws
3922aec36e test(routes): extend misc routes coverage 2025-10-04 05:10:43 +08:00
pixelpaws
41cca8e56d Merge pull request #511 from willmiao/codex/refactor-misc_routes.py-and-add-tests
refactor: modularize misc route controller
2025-10-03 22:47:22 +08:00
pixelpaws
2d37a7341a fix(routes): await trained words extraction 2025-10-03 22:45:25 +08:00
pixelpaws
40e3c6134c refactor(routes): modularize misc route handling 2025-10-03 22:19:09 +08:00
pixelpaws
edddd47a1e Merge pull request #510 from willmiao/codex/update-default-library-folder-path-handling
fix: rename legacy default library to comfyui
2025-10-03 21:50:41 +08:00
pixelpaws
4ea6f38645 fix(config): rename legacy default library 2025-10-03 21:24:17 +08:00
Will Miao
40d998a026 fix(settings): use timezone-aware datetime for current timestamp
fix(tests): normalize stored paths in library upsert test
2025-10-03 20:59:47 +08:00
pixelpaws
3af8f151ac Merge pull request #509 from willmiao/codex/implement-features-from-multi-library-design
feat: add multi-library backend support
2025-10-03 20:37:59 +08:00
pixelpaws
e066fa6873 feat(settings): add multi-library backend support 2025-10-03 20:08:35 +08:00
Will Miao
6bd94269d4 refactor(model-scanner): remove unused model scanning methods 2025-10-03 18:58:02 +08:00
Will Miao
c90edec18a feat(multi-library): add design documentation for multi-library management in standalone mode 2025-10-03 18:34:52 +08:00
Will Miao
cbb302614c Merge branch 'main' of https://github.com/willmiao/ComfyUI-Lora-Manager 2025-10-03 15:23:30 +08:00
Will Miao
c54611a11b fix(metadata-service): change log level to debug for SQLite and fallback provider registration 2025-10-03 15:23:28 +08:00
pixelpaws
88f249649a Merge pull request #508 from willmiao/codex/sync-persistent-model-cache-metadata-updates
fix: sync persistent cache with metadata updates
2025-10-03 15:06:30 +08:00
pixelpaws
fe9fbdb93c fix(cache): sync persistent metadata updates 2025-10-03 14:57:44 +08:00
Will Miao
28bc966b76 feat(model-scanner): enhance cache loading with progress reporting and fallback to full scan 2025-10-03 14:31:08 +08:00
Will Miao
77bbf85b52 feat(persistent-cache): implement SQLite-based persistent model cache with loading and saving functionality 2025-10-03 11:00:51 +08:00
Will Miao
3b1990e97a feat(scanner): enhance model scanning with cache build result and progress reporting 2025-10-02 21:25:09 +08:00
Will Miao
375b5a49f3 fix(config): update standalone mode environment variable usage 2025-10-02 09:40:24 +08:00
pixelpaws
392c157cb5 Merge pull request #503 from willmiao/codex/add-hebrew-locale-to-tests
fix(i18n): add hebrew locale coverage
2025-09-30 22:07:52 +08:00
pixelpaws
6f5bf4b582 fix(i18n): add hebrew locale coverage 2025-09-30 22:03:44 +08:00
pixelpaws
2e3f48ebb7 Merge pull request #426 from start-life/main
Adding Hebrew language
2025-09-30 21:51:50 +08:00
Will Miao
e4a2c518bb fix(preset): update filePath retrieval method in removePreset function 2025-09-30 21:41:30 +08:00
pixelpaws
f19fb68b4c Merge pull request #501 from willmiao/codex/update-downloadmanager-to-handle-multiple-download-urls
feat: retry mirror downloads sequentially
2025-09-30 17:17:34 +08:00
pixelpaws
9121c12a2c feat(download): retry mirror urls sequentially 2025-09-30 17:14:59 +08:00
Will Miao
d0fe28cfe2 fix(recipe): validate modelVersionId before fetching hash from cache or Civitai 2025-09-28 09:18:59 +08:00
Will Miao
656e3e43be fix(imports): update import paths for ensure_settings_file to use relative imports 2025-09-28 08:40:09 +08:00
pixelpaws
c2c1772371 Merge pull request #496 from willmiao/codex/find-best-practices-for-settings-file-storage
feat(settings): persist settings in user config directory
2025-09-28 07:06:02 +08:00
pixelpaws
88d5caf642 feat(settings): migrate settings to user config dir 2025-09-27 22:22:15 +08:00
pixelpaws
1684978693 Merge pull request #491 from willmiao/codex/replace-spaces-in-embedding-paths
Fix embedding relative paths by replacing spaces
2025-09-26 09:06:56 +08:00
pixelpaws
8e4927600f fix(embeddings): replace spaces in relative paths 2025-09-26 09:02:46 +08:00
pixelpaws
4d72dc57e7 Merge pull request #490 from willmiao/codex/remove-comfy-field-from-images-metadata
fix: strip comfy metadata from civitai model images
2025-09-26 08:59:30 +08:00
pixelpaws
e7316b3389 fix(civitai): strip comfy metadata from images 2025-09-26 08:55:46 +08:00
start-life
e17b374606 Update zh-CN.json 2025-09-26 02:16:58 +03:00
Will Miao
141f83065f fix(locales): update language selection text in Chinese 2025-09-25 21:14:58 +08:00
pixelpaws
6381dbafc1 Merge pull request #488 from willmiao/codex/fix-bespoke-import_from-loader-issue
test: standardize backend package imports
2025-09-25 16:51:17 +08:00
pixelpaws
fc9db4510f test: fix duplicate pytest import 2025-09-25 16:44:43 +08:00
pixelpaws
66abf736c9 Merge pull request #487 from willmiao/codex/fix-lora-information-import-issue
fix: parse civitai image LoRAs from hash metadata
2025-09-25 16:00:48 +08:00
pixelpaws
af713470c1 fix(recipes): parse loras from civitai hashes 2025-09-25 15:59:44 +08:00
pixelpaws
93a51d2bcb Merge pull request #486 from willmiao/codex/enable-test-coverage-metrics-in-ci
ci: add pytest coverage reporting
2025-09-25 15:58:47 +08:00
pixelpaws
3f3e06de8a Fix pytest command in backend tests workflow 2025-09-25 15:57:59 +08:00
pixelpaws
7315aac9d8 ci: add pytest coverage reporting 2025-09-25 15:47:07 +08:00
pixelpaws
d933308a6f Merge pull request #485 from willmiao/codex/expand-vitest-coverage-for-frontend-components
test(frontend): extend coverage for comfyui widgets and helpers
2025-09-25 15:37:48 +08:00
pixelpaws
3baf93dcc5 test(frontend): extend coverage for comfyui widgets and helpers 2025-09-25 15:32:25 +08:00
pixelpaws
6ba14bd8fe Merge pull request #484 from willmiao/codex/fix-update-check-in-offline-mode
Fix offline update logging and respect update notification toggle
2025-09-25 14:54:28 +08:00
pixelpaws
7499570766 fix(updates): avoid network stack traces offline 2025-09-25 14:50:06 +08:00
start-life
003ee55a75 Merge branch 'main' into main 2025-09-25 09:28:30 +03:00
pixelpaws
b0cc42ef1f Merge pull request #483 from willmiao/codex/introduce-integration-and-contract-tests
test: add aiohttp integration coverage for ServiceRegistry
2025-09-25 14:23:53 +08:00
pixelpaws
23679ec3f5 chore(tests): clean integration route header 2025-09-25 14:17:45 +08:00
Will Miao
da52e5b9dd fix(settings): improve logic for auto-setting default root paths based on folder presence 2025-09-25 10:56:09 +08:00
Will Miao
c4e357793f Merge branch 'main' of https://github.com/willmiao/ComfyUI-Lora-Manager 2025-09-25 10:43:14 +08:00
Will Miao
6c3424029c fix(recipe image): optimize image saving and update PNG metadata handling, fixes #481 2025-09-25 10:43:10 +08:00
pixelpaws
dd9e6a5b69 Merge pull request #482 from willmiao/codex/add-pytest-modules-for-untested-services
Add backend service and route test coverage
2025-09-25 09:48:27 +08:00
pixelpaws
095320ef72 test(routes): tidy lora route test imports 2025-09-25 09:40:25 +08:00
pixelpaws
35f7674bcd Merge pull request #480 from willmiao/codex/modularize-i18n-tests-into-smaller-cases
test(i18n): modularize translation validation
2025-09-25 09:25:06 +08:00
pixelpaws
26b36c123d test(i18n): modularize translation validation 2025-09-25 09:21:08 +08:00
Will Miao
c85e694c1d docs: update repository guidelines for clarity and consistency 2025-09-25 08:36:15 +08:00
Will Miao
ec05282db6 fix(coverage): improve Vitest CLI path handling and error checking 2025-09-25 08:11:48 +08:00
pixelpaws
3d6f9b226f Merge pull request #479 from willmiao/codex/execute-phase-5-tasks-and-update-roadmap
Add frontend coverage workflow and reporting script
2025-09-25 07:00:04 +08:00
pixelpaws
eda6df4a5d chore(ci): add frontend coverage workflow 2025-09-24 23:22:32 +08:00
Will Miao
d504f89f6a Merge branch 'main' of https://github.com/willmiao/ComfyUI-Lora-Manager 2025-09-24 21:11:43 +08:00
Will Miao
14c468f2a2 feat(video): enhance video handling in model cards with lazy loading and autoplay settings, see #446 2025-09-24 21:11:36 +08:00
pixelpaws
2a99b0e46f Merge pull request #478 from willmiao/codex/execute-phase-4-tasks-from-roadmap
Add interaction-level frontend regression tests
2025-09-24 20:38:51 +08:00
pixelpaws
ae8914f5c8 test(frontend): add interaction regression suites 2025-09-24 20:33:41 +08:00
pixelpaws
0c9f8971ce Merge pull request #477 from willmiao/codex/execute-phase-3-tasks-and-update-roadmap
test: add embeddings and recipes manager suites
2025-09-24 20:18:37 +08:00
pixelpaws
d7a75ea4e5 test(frontend): cover embeddings and recipes managers 2025-09-24 20:15:38 +08:00
pixelpaws
3ad8d8b17c Merge pull request #476 from willmiao/codex/plan-next-tasks-based-on-roadmap-juork7
test(frontend): cover filtering flows for lora and checkpoints
2025-09-24 17:54:50 +08:00
pixelpaws
39225dc204 test(frontend): add filtering coverage for model pages 2025-09-24 17:50:04 +08:00
pixelpaws
4fb69f7d89 Merge pull request #475 from willmiao/codex/plan-next-tasks-based-on-roadmap-gpw3fe
Add checkpoints page smoke tests
2025-09-24 17:22:22 +08:00
pixelpaws
0890c6ad24 test(frontend): add checkpoints manager smoke tests 2025-09-24 17:18:20 +08:00
pixelpaws
dd81809589 Merge pull request #474 from willmiao/codex/plan-next-steps-for-roadmap-tasks
test(frontend): add loras page manager suite
2025-09-24 17:09:38 +08:00
pixelpaws
f0672beb46 test(frontend): add loras page manager suite 2025-09-24 16:22:17 +08:00
pixelpaws
cc5301e710 Merge pull request #473 from willmiao/codex/plan-next-tasks-based-on-roadmap
fix(frontend): validate AppCore initialization wiring
2025-09-24 16:15:05 +08:00
pixelpaws
9d5ec43c4e fix(frontend): correct AppCore example images initialization 2025-09-24 16:10:27 +08:00
pixelpaws
6d41211b07 Merge pull request #472 from willmiao/codex/plan-and-update-frontend-testing-roadmap
Add AppCore page orchestration tests
2025-09-24 15:56:17 +08:00
pixelpaws
d58b61eed5 test(frontend): cover appcore page features 2025-09-24 15:55:50 +08:00
pixelpaws
4b53d98bfc Merge pull request #471 from willmiao/codex/organize-frontend-tests-into-new-directories
test(frontend): document dom fixture workflow
2025-09-24 15:44:23 +08:00
pixelpaws
f51f354e48 test(frontend): add dom fixture helpers 2025-09-24 15:39:52 +08:00
Will Miao
59d027181d refactor: remove obsolete JSON files and add new version metadata for LORA model 2025-09-24 10:56:06 +08:00
Will Miao
0d0988c090 feat: add functionality to attach model files to version data in SQLiteModelMetadataProvider 2025-09-24 10:55:55 +08:00
Will Miao
dc2de50924 bump: update version to 0.9.5 in pyproject.toml 2025-09-24 09:16:50 +08:00
Will Miao
12c88835f2 refactor: enhance model version retrieval logic in CivitaiClient, fixes #460 2025-09-24 09:16:02 +08:00
Will Miao
6f4453aaf3 refactor: remove storage migration logic and associated tests 2025-09-24 06:04:08 +08:00
Will Miao
4b4b8fe3c1 refactor: remove unused ModelRouteUtils class and its methods 2025-09-24 05:41:30 +08:00
pixelpaws
49e7c2e9f5 Merge pull request #470 from willmiao/codex/add-tests-for-storagehelpers-and-appcore
test: add vitest coverage for storage helpers and core
2025-09-24 05:21:02 +08:00
pixelpaws
4653c273e3 test(frontend): add storage and core initialization specs 2025-09-24 05:20:39 +08:00
Will Miao
ae145de2f2 feat(tests): add frontend automated testing setup with Vitest and jsdom 2025-09-23 23:05:55 +08:00
Will Miao
dde7cf71c6 fix(locales): translate global context menu entries for downloading example images 2025-09-23 22:24:57 +08:00
pixelpaws
219cd242db Merge pull request #467 from willmiao/codex/migrate-frontend-settings-to-backend
feat(settings): centralize settings loading and snake_case keys
2025-09-23 22:16:10 +08:00
pixelpaws
e5b712c082 fix(i18n): sync language with state settings 2025-09-23 22:00:31 +08:00
pixelpaws
4d2c60d59b fix(settings): persist language preference 2025-09-23 22:00:20 +08:00
pixelpaws
1d2c1b114b Merge pull request #469 from willmiao/codex/add-download-example-images-to-global-context-menu
feat: add download example images to global context menu
2025-09-23 20:58:35 +08:00
pixelpaws
2bde936d05 Merge pull request #468 from willmiao/codex/migrate-i18n-tests-to-tests-framework
Migrate i18n test script into pytest suite
2025-09-23 20:58:14 +08:00
pixelpaws
cd3e32bf4b feat(context-menu): add example image download entry 2025-09-23 20:49:44 +08:00
pixelpaws
454536d631 test(i18n): migrate localization tests into pytest suite 2025-09-23 20:47:33 +08:00
Will Miao
656f1755fd feat: add cleanup example image folders functionality and UI integration 2025-09-23 20:35:35 +08:00
pixelpaws
8aa76ce5c1 feat(settings): centralize frontend settings on backend 2025-09-23 20:28:32 +08:00
Will Miao
49fa37f00d Merge branch 'main' of https://github.com/willmiao/ComfyUI-Lora-Manager 2025-09-23 19:31:19 +08:00
pixelpaws
9f83548cf3 Merge pull request #466 from willmiao/refactor
Refactor
2025-09-23 19:29:54 +08:00
pixelpaws
6054d95e85 Update py/services/model_query.py
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-09-23 19:25:12 +08:00
pixelpaws
8c9bb35824 Update tests/services/test_base_model_service.py
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-09-23 19:25:02 +08:00
Will Miao
3eacf9558a docs: remove outdated developer notes and add example image route architecture documentation 2025-09-23 15:39:56 +08:00
pixelpaws
fee37172b4 Merge pull request #465 from willmiao/codex/add-async-tests-for-concurrent-behavior
Add async tests for example image download concurrency
2025-09-23 15:00:31 +08:00
pixelpaws
e128c80eb1 test(services): add async example image download tests 2025-09-23 14:58:35 +08:00
pixelpaws
5cc735ed57 Merge pull request #464 from willmiao/codex/refactor-websocket-integration-for-downloading
refactor: align example image downloads with websocket manager
2025-09-23 14:43:37 +08:00
pixelpaws
43fcce6361 refactor(example-images): inject websocket manager 2025-09-23 14:40:43 +08:00
pixelpaws
49b7126278 Merge pull request #463 from willmiao/codex/refactor-downloadmanager-to-instance-based
refactor: convert example image download manager to service instance
2025-09-23 13:08:08 +08:00
pixelpaws
679cfb5c69 refactor(example-images): encapsulate download manager state 2025-09-23 13:07:11 +08:00
pixelpaws
50616bc680 Merge pull request #462 from willmiao/codex/wrap-long-running-flows-in-use-cases
feat(example-images): add use case orchestration
2025-09-23 11:55:06 +08:00
pixelpaws
aaad270822 feat(example-images): add use case orchestration 2025-09-23 11:47:12 +08:00
pixelpaws
bd10280736 Merge pull request #461 from willmiao/codex/refactor-example-images-routes
Add regression tests for example images routes
2025-09-23 11:31:22 +08:00
Will Miao
d477050239 feat: add global context menu with actions and integration 2025-09-23 11:19:36 +08:00
pixelpaws
85f79cd8d1 refactor(routes): introduce example images controller 2025-09-23 11:12:08 +08:00
pixelpaws
613cd81152 refactor(routes): add registrar for example images 2025-09-23 11:12:05 +08:00
pixelpaws
e0aba6c49a test(example-images): add route regression harness 2025-09-23 10:41:56 +08:00
pixelpaws
d78bcf2494 Merge pull request #459 from willmiao/codex/complete-refactor-with-tests-and-documentation
Add recipe route integration tests and update architecture docs
2025-09-22 14:19:41 +08:00
pixelpaws
f7cffd2eba test(recipes): add route smoke tests and docs 2025-09-22 14:15:24 +08:00
pixelpaws
0d0b91aa80 Merge pull request #458 from willmiao/codex/expose-first-class-operations-on-recipescanner
feat: expose recipe scanner mutation APIs
2025-09-22 13:54:49 +08:00
pixelpaws
42872e6d2d feat(recipes): expose recipe scanner mutation apis 2025-09-22 13:45:40 +08:00
Will Miao
b91f06405d feat: support clip strength in LoRA usage tips, fixes #401 2025-09-22 13:42:36 +08:00
pixelpaws
dac4c688d6 Merge pull request #456 from willmiao/codex/refactor-http-handlers-with-recipe-services
Refactor recipe handlers to use dedicated services
2025-09-22 13:30:05 +08:00
pixelpaws
097a68ad18 refactor(recipes): introduce dedicated services for handlers 2025-09-22 13:25:21 +08:00
pixelpaws
4a98710db0 Merge pull request #455 from willmiao/codex/refactor-reciperoutes-into-handler-objects
refactor: split recipe routes into dedicated handlers
2025-09-22 13:02:39 +08:00
pixelpaws
d033a374dd refactor(routes): split recipe handlers into dedicated classes 2025-09-22 12:57:37 +08:00
pixelpaws
6aa23fe36a Merge pull request #454 from willmiao/codex/migrate-recipe-http-layer-architecture
Refactor recipe routes to use registrar scaffolding
2025-09-22 12:42:37 +08:00
pixelpaws
3220cfb79c test(recipe-routes): add scaffolding baseline 2025-09-22 12:41:37 +08:00
pixelpaws
b92e7aa446 chore(routes): dedupe os import 2025-09-22 12:15:12 +08:00
Will Miao
c3b9c73541 refactor: remove ModelRouteUtils usage and implement filtering directly in services 2025-09-22 09:09:40 +08:00
pixelpaws
81c6672880 Merge pull request #453 from willmiao/codex/evaluate-need-for-further-refactoring
refactor: migrate model lifecycle handlers to dedicated service
2025-09-22 08:37:40 +08:00
pixelpaws
08baf884d3 refactor(routes): migrate lifecycle mutations to service 2025-09-22 08:28:30 +08:00
Will Miao
1c4096f3d5 test(routes): add tests for service readiness and error handling in download model 2025-09-22 06:28:30 +08:00
Will Miao
66a3f3f59a refactor(tests): enhance async test handling in pytest_pyfunc_call 2025-09-22 05:37:24 +08:00
pixelpaws
624df1328b Merge pull request #452 from willmiao/codex/create-application-level-use-case-services
feat(routes): extract orchestration use cases
2025-09-22 05:27:19 +08:00
pixelpaws
c063854b51 feat(routes): extract orchestration use cases 2025-09-22 05:25:27 +08:00
Will Miao
8cf99dd928 refactor(tests): remove deprecated test runner script 2025-09-21 23:39:21 +08:00
pixelpaws
c07e885725 Merge pull request #451 from willmiao/codex/refactor-routes_common.py-into-services
Refactor model route utilities into dedicated services
2025-09-21 23:38:38 +08:00
pixelpaws
21772feadd refactor(routes): extract route utilities into services 2025-09-21 23:34:46 +08:00
Will Miao
2d00cfdd31 refactor: enhance BaseModelService initialization and improve filtering logic 2025-09-21 23:13:30 +08:00
pixelpaws
49e03d658b Merge pull request #450 from willmiao/codex/refactor-basemodelroutes-for-better-separation
refactor(routes): extract registrar and handlers
2025-09-21 22:46:13 +08:00
Will Miao
fec85bcc08 refactor: unify standalone mode check using environment variable 2025-09-21 22:45:11 +08:00
pixelpaws
0e93a6bcb0 refactor(routes): extract registrar and handlers 2025-09-21 20:52:08 +08:00
pixelpaws
7e20f738fb Merge pull request #449 from willmiao/codex/document-and-map-basemodelroutes-contracts
docs(routes): map base model dependencies and contracts
2025-09-21 20:40:44 +08:00
pixelpaws
24090e6077 docs(routes): map base model dependencies and contracts 2025-09-21 20:34:45 +08:00
Will Miao
1022b07f64 feat: enhance model metadata provider with import error handling and mock setup for tests 2025-09-21 19:57:49 +08:00
pixelpaws
4faf912c6f Merge pull request #448 from willmiao/codex/create-documentation-and-tests-for-model-routes
test(routes): add base model route smoke coverage
2025-09-21 17:19:52 +08:00
pixelpaws
56e4b24b07 test(routes): clean smoke test module 2025-09-21 17:15:24 +08:00
Will Miao
12295d2fdc feat(docs): add comprehensive repository guidelines and project structure documentation 2025-09-21 16:40:07 +08:00
Will Miao
6261f7d18d Update LM extension wiki 2025-09-20 23:21:10 +08:00
start-life
a3c28c1003 Update zh-TW.json 2025-09-12 03:33:34 +03:00
start-life
f4b7c9a138 Update zh-CN.json 2025-09-12 03:33:09 +03:00
start-life
6b860b5f29 Update ru.json 2025-09-12 03:32:15 +03:00
start-life
37dfcd6abd Update ko.json 2025-09-12 03:31:57 +03:00
start-life
bc2fca3a4f Update ja.json 2025-09-12 03:31:35 +03:00
start-life
f8ef159656 Update fr.json 2025-09-12 03:31:12 +03:00
start-life
b2b8a9d37e Update es.json 2025-09-12 03:30:56 +03:00
start-life
15ae4031b7 Update de.json 2025-09-12 03:30:39 +03:00
start-life
688976ce3b Update en.json 2025-09-12 03:30:14 +03:00
start-life
a548af01dc Update settings_modal.html
Adding Hebrew language
2025-09-12 03:28:45 +03:00
start-life
0dd52eceb3 Update index.js
Adding Hebrew language
2025-09-12 03:26:08 +03:00
start-life
b8c6cf4ac1 Add files via upload
Adding Hebrew language
2025-09-12 03:20:38 +03:00
220 changed files with 28429 additions and 9318 deletions

.github/workflows/backend-tests.yml (new file, 69 lines)

@@ -0,0 +1,69 @@
name: Backend Tests

on:
  push:
    branches:
      - main
      - master
    paths:
      - 'py/**'
      - 'standalone.py'
      - 'tests/**'
      - 'requirements.txt'
      - 'requirements-dev.txt'
      - 'pyproject.toml'
      - 'pytest.ini'
      - '.github/workflows/backend-tests.yml'
  pull_request:
    paths:
      - 'py/**'
      - 'standalone.py'
      - 'tests/**'
      - 'requirements.txt'
      - 'requirements-dev.txt'
      - 'pyproject.toml'
      - 'pytest.ini'
      - '.github/workflows/backend-tests.yml'

jobs:
  pytest:
    name: Run pytest with coverage
    runs-on: ubuntu-latest
    steps:
      - name: Check out repository
        uses: actions/checkout@v4
      - name: Set up Python 3.11
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'
          cache: 'pip'
          cache-dependency-path: |
            requirements.txt
            requirements-dev.txt
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements-dev.txt
      - name: Run pytest with coverage
        env:
          COVERAGE_FILE: coverage/backend/.coverage
        run: |
          mkdir -p coverage/backend
          python -m pytest \
            --cov=py \
            --cov=standalone \
            --cov-report=term-missing \
            --cov-report=xml:coverage/backend/coverage.xml \
            --cov-report=html:coverage/backend/html \
            --cov-report=json:coverage/backend/coverage.json
      - name: Upload coverage artifact
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: backend-coverage
          path: coverage/backend
          if-no-files-found: warn

.github/workflows/frontend-tests.yml (new file, 52 lines)

@@ -0,0 +1,52 @@
name: Frontend Tests

on:
  push:
    branches:
      - main
      - master
    paths:
      - 'package.json'
      - 'package-lock.json'
      - 'vitest.config.js'
      - 'tests/frontend/**'
      - 'static/js/**'
      - 'scripts/run_frontend_coverage.js'
      - '.github/workflows/frontend-tests.yml'
  pull_request:
    paths:
      - 'package.json'
      - 'package-lock.json'
      - 'vitest.config.js'
      - 'tests/frontend/**'
      - 'static/js/**'
      - 'scripts/run_frontend_coverage.js'
      - '.github/workflows/frontend-tests.yml'

jobs:
  vitest:
    name: Run Vitest with coverage
    runs-on: ubuntu-latest
    steps:
      - name: Check out repository
        uses: actions/checkout@v4
      - name: Use Node.js 20
        uses: actions/setup-node@v4
        with:
          node-version: 20
          cache: 'npm'
      - name: Install dependencies
        run: npm ci
      - name: Run frontend tests with coverage
        run: npm run test:coverage
      - name: Upload coverage artifact
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: frontend-coverage
          path: coverage/frontend
          if-no-files-found: warn

.gitignore (3 lines added)

@@ -6,3 +6,6 @@ py/run_test.py
.vscode/
cache/
civitai/
node_modules/
coverage/
.coverage

AGENTS.md (new file, 22 lines)

@@ -0,0 +1,22 @@
# Repository Guidelines

## Project Structure & Module Organization
ComfyUI LoRA Manager pairs a Python backend with browser-side widgets. Backend modules live in `py/` with HTTP entry points in `py/routes/`, feature logic in `py/services/`, shared helpers in `py/utils/`, and custom nodes in `py/nodes/`. UI scripts extend ComfyUI from `web/comfyui/`, while deploy-ready assets remain in `static/` and `templates/`. Localization files live in `locales/`, example workflows in `example_workflows/`, and interim tests such as `test_i18n.py` sit beside their source until a dedicated `tests/` tree lands.

## Build, Test, and Development Commands
- `pip install -r requirements.txt` installs backend dependencies.
- `python standalone.py --port 8188` launches the standalone server for iterative development.
- `python -m pytest test_i18n.py` runs the current regression suite; target new files explicitly, e.g. `python -m pytest tests/test_recipes.py`.
- `python scripts/sync_translation_keys.py` synchronizes locale keys after UI string updates.

## Coding Style & Naming Conventions
Follow PEP 8 with four-space indentation and descriptive snake_case file and function names such as `settings_manager.py`. Classes stay PascalCase, constants in UPPER_SNAKE_CASE, and loggers retrieved via `logging.getLogger(__name__)`. Prefer explicit type hints and docstrings on public APIs. JavaScript under `web/comfyui/` uses ES modules with camelCase helpers and the `_widget.js` suffix for UI components.

## Testing Guidelines
Pytest powers backend tests. Name modules `test_<feature>.py` and keep them near the code or in a future `tests/` package. Mock ComfyUI dependencies through helpers in `standalone.py`, keep filesystem fixtures deterministic, and ensure translations are covered. Run `python -m pytest` before submitting changes.

## Commit & Pull Request Guidelines
Commits follow the conventional format, e.g. `feat(settings): add default model path`, and should stay focused on a single concern. Pull requests must outline the problem, summarize the solution, list manual verification steps (server run, targeted pytest), and link related issues. Include screenshots or GIFs for UI or locale updates and call out migration steps such as `settings.json` adjustments.

## Configuration & Localization Tips
Copy `settings.json.example` to `settings.json` and adapt model directories before running the standalone server. Store reference assets in `civitai/` or `docs/` to keep runtime directories deploy-ready. Whenever UI text changes, update every `locales/<lang>.json` file and rerun the translation sync script so ComfyUI surfaces localized strings.

README.md

@@ -34,6 +34,14 @@ Enhance your Civitai browsing experience with our companion browser extension! S
## Release Notes
### v0.9.6
* **Critical Performance Optimization** - Introduced persistent model cache that dramatically accelerates initialization after startup and significantly reduces Python backend memory footprint for improved application performance.
* **Cross-Browser Settings Synchronization** - Migrated nearly all settings to the backend, ensuring your preferences sync automatically across all browsers for a seamless multi-browser experience.
* **Protected User Settings Location** - Relocated user settings (settings.json) to the user config directory (accessible via the link icon in Settings), preventing accidental deletion during reinstalls or updates.
* **Global Context Menu** - Added a new global context menu accessible by right-clicking on empty page areas, providing quick access to global operations with more features coming in future updates.
* **Multi-Library Support** - Introduced support for managing multiple libraries, allowing you to easily switch between different model collections (advanced usage, documentation in progress).
* **Bug Fixes & Stability Improvements** - Various bug fixes and enhancements for improved stability and reliability.
### v0.9.3
* **Metadata Archive Database Support** - Added the ability to download and utilize a metadata archive database, enabling access to metadata for models that have been deleted from CivitAI.
* **App-Level Proxy Settings** - Introduced support for configuring a global proxy within the application, making it easier to use the manager behind network restrictions.
@@ -141,7 +149,7 @@ Enhance your Civitai browsing experience with our companion browser extension! S
1. Download the [Portable Package](https://github.com/willmiao/ComfyUI-Lora-Manager/releases/download/v0.9.2/lora_manager_portable.7z)
2. Copy the provided `settings.json.example` file to create a new file named `settings.json` in `comfyui-lora-manager` folder
-3. Edit `settings.json` to include your correct model folder paths and CivitAI API key
+3. Edit the new `settings.json` to include your correct model folder paths and CivitAI API key
4. Run run.bat
- To change the startup port, edit `run.bat` and modify the parameter (e.g. `--port 9001`)
@@ -209,7 +217,7 @@ You can combine multiple patterns to create detailed, organized filenames for yo
You can now run LoRA Manager independently from ComfyUI:
1. **For ComfyUI users**:
-   - Launch ComfyUI with LoRA Manager at least once to initialize the necessary path information in the `settings.json` file.
+   - Launch ComfyUI with LoRA Manager at least once to initialize the necessary path information in the `settings.json` file located in your user settings folder (see paths above).
- Make sure dependencies are installed: `pip install -r requirements.txt`
- From your ComfyUI root directory, run:
```bash
@@ -231,8 +239,37 @@ You can now run LoRA Manager independently from ComfyUI:
```
- Access the interface through your browser at: `http://localhost:8188/loras`
> **Note:** Existing installations automatically migrate the legacy `settings.json` from the plugin folder to the user settings directory the first time you launch this version.
This standalone mode provides a lightweight option for managing your model and recipe collection without needing to run the full ComfyUI environment, making it useful even for users who primarily use other stable diffusion interfaces.
## Testing & Coverage
### Backend
Install the development dependencies and run pytest with coverage reports:
```bash
pip install -r requirements-dev.txt
COVERAGE_FILE=coverage/backend/.coverage pytest \
  --cov=py \
  --cov=standalone \
  --cov-report=term-missing \
  --cov-report=html:coverage/backend/html \
  --cov-report=xml:coverage/backend/coverage.xml \
  --cov-report=json:coverage/backend/coverage.json
```
HTML, XML, and JSON artifacts are stored under `coverage/backend/` so you can inspect hot spots locally or from CI artifacts.
### Frontend
Run the Vitest coverage suite to analyze widget hot spots:
```bash
npm run test:coverage
```
---
## Contributing

__init__.py

@@ -1,13 +1,32 @@
-from .py.lora_manager import LoraManager
-from .py.nodes.lora_loader import LoraManagerLoader, LoraManagerTextLoader
-from .py.nodes.trigger_word_toggle import TriggerWordToggle
-from .py.nodes.lora_stacker import LoraStacker
-from .py.nodes.save_image import SaveImage
-from .py.nodes.debug_metadata import DebugMetadata
-from .py.nodes.wanvideo_lora_select import WanVideoLoraSelect
-from .py.nodes.wanvideo_lora_select_from_text import WanVideoLoraSelectFromText
-# Import metadata collector to install hooks on startup
-from .py.metadata_collector import init as init_metadata_collector
+try:  # pragma: no cover - import fallback for pytest collection
+    from .py.lora_manager import LoraManager
+    from .py.nodes.lora_loader import LoraManagerLoader, LoraManagerTextLoader
+    from .py.nodes.trigger_word_toggle import TriggerWordToggle
+    from .py.nodes.lora_stacker import LoraStacker
+    from .py.nodes.save_image import SaveImage
+    from .py.nodes.debug_metadata import DebugMetadata
+    from .py.nodes.wanvideo_lora_select import WanVideoLoraSelect
+    from .py.nodes.wanvideo_lora_select_from_text import WanVideoLoraSelectFromText
+    from .py.metadata_collector import init as init_metadata_collector
+except ImportError:  # pragma: no cover - allows running under pytest without package install
+    import importlib
+    import pathlib
+    import sys
+
+    package_root = pathlib.Path(__file__).resolve().parent
+    if str(package_root) not in sys.path:
+        sys.path.append(str(package_root))
+
+    LoraManager = importlib.import_module("py.lora_manager").LoraManager
+    LoraManagerLoader = importlib.import_module("py.nodes.lora_loader").LoraManagerLoader
+    LoraManagerTextLoader = importlib.import_module("py.nodes.lora_loader").LoraManagerTextLoader
+    TriggerWordToggle = importlib.import_module("py.nodes.trigger_word_toggle").TriggerWordToggle
+    LoraStacker = importlib.import_module("py.nodes.lora_stacker").LoraStacker
+    SaveImage = importlib.import_module("py.nodes.save_image").SaveImage
+    DebugMetadata = importlib.import_module("py.nodes.debug_metadata").DebugMetadata
+    WanVideoLoraSelect = importlib.import_module("py.nodes.wanvideo_lora_select").WanVideoLoraSelect
+    WanVideoLoraSelectFromText = importlib.import_module("py.nodes.wanvideo_lora_select_from_text").WanVideoLoraSelectFromText
+    init_metadata_collector = importlib.import_module("py.metadata_collector").init

NODE_CLASS_MAPPINGS = {
    LoraManagerLoader.NAME: LoraManagerLoader,


@@ -1,6 +1,9 @@
## Overview
-The **LoRA Manager Civitai Extension** is a Browser extension designed to work seamlessly with [LoRA Manager](https://github.com/willmiao/ComfyUI-Lora-Manager) to significantly enhance your browsing experience on [Civitai](https://civitai.com). With this extension, you can:
+The **LoRA Manager Civitai Extension** is a Browser extension designed to work seamlessly with [LoRA Manager](https://github.com/willmiao/ComfyUI-Lora-Manager) to significantly enhance your browsing experience on [Civitai](https://civitai.com).
+It also supports browsing on [CivArchive](https://civarchive.com/) (formerly CivitaiArchive).
+With this extension, you can:
✅ Instantly see which models are already present in your local library
✅ Download new models with a single click
@@ -8,6 +11,7 @@ The **LoRA Manager Civitai Extension** is a Browser extension designed to work s
✅ Keep your downloaded models automatically organized according to your custom settings
![Civitai Models page](https://github.com/willmiao/ComfyUI-Lora-Manager/blob/main/wiki-images/civitai-models-page.png)
![CivArchive Models page](https://github.com/willmiao/ComfyUI-Lora-Manager/blob/main/wiki-images/civarchive-models-page.png)
---


@@ -0,0 +1,93 @@
# Example image route architecture
The example image routing stack mirrors the layered model route stack described in
[`docs/architecture/model_routes.md`](model_routes.md). HTTP wiring, controller setup,
handler orchestration, and long-running workflows now live in clearly separated modules so
we can extend download/import behaviour without touching the entire feature surface.
```mermaid
graph TD
    subgraph HTTP
        A[ExampleImagesRouteRegistrar] -->|binds| B[ExampleImagesRoutes controller]
    end
    subgraph Application
        B --> C[ExampleImagesHandlerSet]
        C --> D1[Handlers]
        D1 --> E1[Use cases]
        E1 --> F1[Download manager / processor / file manager]
    end
    subgraph Side Effects
        F1 --> G1[Filesystem]
        F1 --> G2[Model metadata]
        F1 --> G3[WebSocket progress]
    end
```
## Layer responsibilities
| Layer | Module(s) | Responsibility |
| --- | --- | --- |
| Registrar | `py/routes/example_images_route_registrar.py` | Declarative catalogue of every example image endpoint plus helpers that bind them to an `aiohttp` router. Keeps HTTP concerns symmetrical with the model registrar. |
| Controller | `py/routes/example_images_routes.py` | Lazily constructs `ExampleImagesHandlerSet`, injects defaults for the download manager, processor, and file manager, and exposes the registrar-ready mapping just like `BaseModelRoutes`. |
| Handler set | `py/routes/handlers/example_images_handlers.py` | Groups HTTP adapters by concern (downloads, imports/deletes, filesystem access). Each handler translates domain errors into HTTP responses and defers to a use case or utility service. |
| Use cases | `py/services/use_cases/example_images/*.py` | Encapsulate orchestration for downloads and imports. They validate input, translate concurrency/configuration errors, and keep handler logic declarative. |
| Supporting services | `py/utils/example_images_download_manager.py`, `py/utils/example_images_processor.py`, `py/utils/example_images_file_manager.py` | Execute long-running work: pull assets from Civitai, persist uploads, clean metadata, expose filesystem actions with guardrails, and broadcast progress snapshots. |
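
To make the registrar/controller handshake concrete, here is a minimal sketch, assuming `ROUTE_DEFINITIONS` holds `(method, path, handler_name)` tuples; the tuple shape, method names, and example endpoints are illustrative assumptions rather than the project's exact API.

```python
# Hypothetical sketch of declarative route binding; only aiohttp's
# router.add_route is a real API here.
from aiohttp import web

ROUTE_DEFINITIONS = [
    ("POST", "/api/lm/download-example-images", "download_example_images"),
    ("GET", "/api/lm/example-images-status", "get_example_images_status"),
]

class ExampleImagesRouteRegistrar:
    def __init__(self, app: web.Application):
        self._app = app

    def register_routes(self, handler_mapping: dict) -> None:
        # Bind every declared endpoint to the coroutine exposed by the controller.
        for method, path, handler_name in ROUTE_DEFINITIONS:
            self._app.router.add_route(method, path, handler_mapping[handler_name])
```

A caller would then pass the controller's registrar-ready mapping, e.g. `registrar.register_routes(routes.to_route_mapping())`, keeping HTTP wiring out of the handlers themselves.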
## Handler responsibilities & invariants
`ExampleImagesHandlerSet` flattens the handler objects into the `{"handler_name": coroutine}`
mapping consumed by the registrar. The table below outlines how each handler collaborates
with the use cases and utilities.
| Handler | Key endpoints | Collaborators | Contracts |
| --- | --- | --- | --- |
| `ExampleImagesDownloadHandler` | `/api/lm/download-example-images`, `/api/lm/example-images-status`, `/api/lm/pause-example-images`, `/api/lm/resume-example-images`, `/api/lm/force-download-example-images` | `DownloadExampleImagesUseCase`, `DownloadManager` | Delegates payload validation and concurrency checks to the use case; progress/status endpoints expose the same snapshot used for WebSocket broadcasts; pause/resume surface `DownloadNotRunningError` as HTTP 400 instead of 500. |
| `ExampleImagesManagementHandler` | `/api/lm/import-example-images`, `/api/lm/delete-example-image` | `ImportExampleImagesUseCase`, `ExampleImagesProcessor` | Multipart uploads are streamed to disk via the use case; validation failures return HTTP 400 with no filesystem side effects; deletion funnels through the processor to prune metadata and cached images consistently. |
| `ExampleImagesFileHandler` | `/api/lm/open-example-images-folder`, `/api/lm/example-image-files`, `/api/lm/has-example-images` | `ExampleImagesFileManager` | Centralises filesystem access, enforcing settings-based root paths and returning HTTP 400/404 for missing configuration or folders; responses always include `success`/`has_images` booleans for UI consumption. |
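
The pause/resume contract in the table can be pictured with a small sketch; `DownloadNotRunningError` is named above, while the `pause()` entry point and the response shape are assumptions.

```python
# Sketch only: translate a domain error into HTTP 400 instead of a 500.
from aiohttp import web

class DownloadNotRunningError(Exception):
    """Raised when pause/resume is requested while no download is active."""

class ExampleImagesDownloadHandler:
    def __init__(self, download_manager):
        self._download_manager = download_manager

    async def pause_example_images(self, request: web.Request) -> web.Response:
        try:
            await self._download_manager.pause()  # assumed pause entry point
        except DownloadNotRunningError as exc:
            return web.json_response({"success": False, "error": str(exc)}, status=400)
        return web.json_response({"success": True})
```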
## Use case boundaries
| Use case | Entry point | Dependencies | Guarantees |
| --- | --- | --- | --- |
| `DownloadExampleImagesUseCase` | `execute(payload)` | `DownloadManager.start_download`, download configuration errors | Raises `DownloadExampleImagesInProgressError` when the manager reports an active job, rewraps configuration errors into `DownloadExampleImagesConfigurationError`, and lets `ExampleImagesDownloadError` bubble as 500s so handlers do not duplicate logging. |
| `ImportExampleImagesUseCase` | `execute(request)` | `ExampleImagesProcessor.import_images`, temporary file helpers | Supports multipart or JSON payloads, normalises file paths into a single list, cleans up temp files even on failure, and maps validation issues to `ImportExampleImagesValidationError` for HTTP 400 responses. |
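
A minimal sketch of the download use case's guarantees follows; the exception names come from the table above, while the `is_downloading()` probe and treating configuration problems as `ValueError` are assumptions.

```python
class DownloadExampleImagesInProgressError(Exception):
    pass

class DownloadExampleImagesConfigurationError(Exception):
    pass

class DownloadExampleImagesUseCase:
    def __init__(self, download_manager):
        self._download_manager = download_manager

    async def execute(self, payload: dict) -> dict:
        # Refuse to start a second job while the manager reports an active one.
        if self._download_manager.is_downloading():  # assumed status probe
            raise DownloadExampleImagesInProgressError("download already running")
        try:
            # ExampleImagesDownloadError deliberately bubbles up as a 500
            # so handlers do not duplicate logging.
            return await self._download_manager.start_download(payload)
        except ValueError as exc:  # assumed shape of configuration errors
            raise DownloadExampleImagesConfigurationError(str(exc)) from exc
```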
## Maintaining critical invariants
* **Shared progress snapshots** - The download handler returns the same snapshot built by
`DownloadManager`, guaranteeing parity between HTTP polling endpoints and WebSocket
progress events.
* **Safe filesystem access** - All folder/file actions flow through
`ExampleImagesFileManager`, which validates the configured example image root and ensures
responses never leak absolute paths outside the allowed directory.
* **Metadata hygiene** - Import/delete operations run through `ExampleImagesProcessor`,
which updates model metadata via `MetadataManager` and notifies the relevant scanners so
cache state stays in sync.
## Migration notes
The refactor brings the example image stack in line with the model/recipe stacks:
1. `ExampleImagesRouteRegistrar` now owns the declarative route list. Downstream projects
should rely on `ExampleImagesRoutes.to_route_mapping()` instead of manually wiring
handler callables.
2. `ExampleImagesRoutes` caches its `ExampleImagesHandlerSet` just like
`BaseModelRoutes`. If you previously instantiated handlers directly, inject custom
collaborators via the controller constructor (`download_manager`, `processor`,
`file_manager`) to keep test seams predictable.
3. Tests that mocked `ExampleImagesRoutes.setup_routes` should switch to patching
`DownloadExampleImagesUseCase`/`ImportExampleImagesUseCase` at import time. The handlers
expect those abstractions to surface validation/concurrency errors, and bypassing them
will skip the HTTP-friendly error mapping.
## Extending the stack
1. Add the endpoint to `ROUTE_DEFINITIONS` with a unique `handler_name`.
2. Expose the coroutine on an existing handler class (or create a new handler and extend
`ExampleImagesHandlerSet`).
3. Wire additional services or factories inside `_build_handler_set` on
`ExampleImagesRoutes`, mirroring how the model stack introduces new use cases.
`tests/routes/test_example_images_routes.py` exercises registrar binding, download pause
flows, and import validations. Use it as a template when introducing new handler
collaborators or error mappings.

docs/architecture/model_routes.md (new file, 100 lines)

@@ -0,0 +1,100 @@
# Base model route architecture
The model routing stack now splits HTTP wiring, orchestration logic, and
business rules into discrete layers. The goal is to make it obvious where a
new collaborator should live and which contract it must honour. The diagram
below captures the end-to-end flow for a typical request:
```mermaid
graph TD
    subgraph HTTP
        A[ModelRouteRegistrar] -->|binds| B[BaseModelRoutes handler proxy]
    end
    subgraph Application
        B --> C[ModelHandlerSet]
        C --> D1[Handlers]
        D1 --> E1[Use cases]
        E1 --> F1[Services / scanners]
    end
    subgraph Side Effects
        F1 --> G1[Cache & metadata]
        F1 --> G2[Filesystem]
        F1 --> G3[WebSocket state]
    end
```
Every box maps to a concrete module:
| Layer | Module(s) | Responsibility |
| --- | --- | --- |
| Registrar | `py/routes/model_route_registrar.py` | Declarative list of routes shared by every model type and helper methods for binding them to an `aiohttp` application. |
| Route controller | `py/routes/base_model_routes.py` | Constructs the handler graph, injects shared services, exposes proxies that surface `503 Service not ready` when the model service has not been attached. |
| Handler set | `py/routes/handlers/model_handlers.py` | Thin HTTP adapters grouped by concern (page rendering, listings, mutations, queries, downloads, CivitAI integration, move operations, auto-organize). |
| Use cases | `py/services/use_cases/*.py` | Encapsulate long-running flows (`DownloadModelUseCase`, `BulkMetadataRefreshUseCase`, `AutoOrganizeUseCase`). They normalise validation errors and concurrency constraints before returning control to the handlers. |
| Services | `py/services/*.py` | Existing services and scanners that mutate caches, write metadata, move files, and broadcast WebSocket updates. |
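
The `503 Service not ready` proxy mentioned for the route controller can be sketched roughly as below; the class and method names are illustrative stand-ins, not the actual `BaseModelRoutes` API.

```python
from aiohttp import web

class BaseModelRoutesSketch:
    """Illustrative stand-in for the controller's service-readiness proxy."""

    def __init__(self, handler_set, service=None):
        self._handler_set = handler_set
        self._service = service  # attached later, once the model service exists

    def proxy(self, handler_name: str):
        async def call(request: web.Request) -> web.Response:
            if self._service is None:
                # Fail fast with a clear 503 instead of an AttributeError later.
                return web.json_response(
                    {"success": False, "error": "Service not ready"}, status=503
                )
            return await getattr(self._handler_set, handler_name)(request)
        return call
```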
## Handler responsibilities & contracts
`ModelHandlerSet` flattens the handler objects into the exact callables used by
the registrar. The table below highlights the separation of concerns within
the set and the invariants that must hold after each handler returns.
| Handler | Key endpoints | Collaborators | Contracts |
| --- | --- | --- | --- |
| `ModelPageView` | `/{prefix}` | `SettingsManager`, `server_i18n`, Jinja environment, `service.scanner` | Template is rendered with `is_initializing` flag when caches are cold; i18n filter is registered exactly once per environment instance. |
| `ModelListingHandler` | `/api/lm/{prefix}/list` | `service.get_paginated_data`, `service.format_response` | Listings respect pagination query parameters and cap `page_size` at 100; every item is formatted before response. |
| `ModelManagementHandler` | Mutations (delete, exclude, metadata, preview, tags, rename, bulk delete, duplicate verification) | `ModelLifecycleService`, `MetadataSyncService`, `PreviewAssetService`, `TagUpdateService`, scanner cache/index | Cache state mirrors filesystem changes: deletes prune cache & hash index, preview replacements synchronise metadata and cache NSFW levels, metadata saves trigger cache resort when names change. |
| `ModelQueryHandler` | Read-only queries (top tags, folders, duplicates, metadata, URLs) | Service query helpers & scanner cache | Outputs always wrapped in `{"success": True}` when no error; duplicate/filename grouping omits empty entries; invalid parameters (e.g. missing `model_root`) return HTTP 400. |
| `ModelDownloadHandler` | `/api/lm/download-model`, `/download-model-get`, `/download-progress/{id}`, `/cancel-download-get` | `DownloadModelUseCase`, `DownloadCoordinator`, `WebSocketManager` | Payload validation errors become HTTP 400 without mutating download progress cache; early-access failures surface as HTTP 401; successful downloads cache progress snapshots that back both WebSocket broadcasts and polling endpoints. |
| `ModelCivitaiHandler` | CivitAI metadata routes | `MetadataSyncService`, metadata provider factory, `BulkMetadataRefreshUseCase` | `fetch_all_civitai` streams progress via `WebSocketBroadcastCallback`; version lookups validate model type before returning; local availability fields derive from hash lookups without mutating cache state. |
| `ModelMoveHandler` | `move_model`, `move_models_bulk` | `ModelMoveService` | Moves execute atomically per request; bulk operations aggregate success/failure per file set. |
| `ModelAutoOrganizeHandler` | `/api/lm/{prefix}/auto-organize` (GET/POST), `/auto-organize-progress` | `AutoOrganizeUseCase`, `WebSocketProgressCallback`, `WebSocketManager` | Enforces single-flight execution using the shared lock; progress broadcasts remain available to polling clients until explicitly cleared; conflicts return HTTP 409 with a descriptive error. |
## Use case boundaries
Each use case exposes a narrow asynchronous API that hides the underlying
services. Their error mapping is essential for predictable HTTP responses.
| Use case | Entry point | Dependencies | Guarantees |
| --- | --- | --- | --- |
| `DownloadModelUseCase` | `execute(payload)` | `DownloadCoordinator.schedule_download` | Translates `ValueError` into `DownloadModelValidationError` for HTTP 400, recognises early-access errors (`"401"` in message) and surfaces them as `DownloadModelEarlyAccessError`, forwards success dictionaries untouched. |
| `AutoOrganizeUseCase` | `execute(file_paths, progress_callback)` | `ModelFileService.auto_organize_models`, `WebSocketManager` lock | Guarded by `ws_manager` lock + status checks; raises `AutoOrganizeInProgressError` before invoking the file service when another run is already active. |
| `BulkMetadataRefreshUseCase` | `execute_with_error_handling(progress_callback)` | `MetadataSyncService`, `SettingsManager`, `WebSocketBroadcastCallback` | Iterates through cached models, applies metadata sync, emits progress snapshots that handlers broadcast unchanged. |
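
A compact sketch of the `DownloadModelUseCase` mapping described above; `schedule_download` and the exception names come from the table, while funnelling all failures through `ValueError` is an assumption.

```python
class DownloadModelValidationError(Exception):
    pass

class DownloadModelEarlyAccessError(Exception):
    pass

class DownloadModelUseCase:
    def __init__(self, coordinator):
        self._coordinator = coordinator

    async def execute(self, payload: dict) -> dict:
        try:
            # Success dictionaries from the coordinator are forwarded untouched.
            return await self._coordinator.schedule_download(payload)
        except ValueError as exc:
            message = str(exc)
            if "401" in message:
                # Early-access failures are recognised by message content.
                raise DownloadModelEarlyAccessError(message) from exc
            raise DownloadModelValidationError(message) from exc
```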
## Maintaining legacy contracts
The refactor preserves the invariants called out in the previous architecture
notes. The most critical ones are reiterated here to emphasise the
collaboration points:
1. **Cache mutations** - Delete, exclude, rename, and bulk delete operations are
channelled through `ModelManagementHandler`. The handler delegates to
`ModelLifecycleService` or `MetadataSyncService`, and the scanner cache is
mutated in-place before the handler returns. The accompanying tests assert
that `scanner._cache.raw_data` and `scanner._hash_index` stay in sync after
each mutation.
2. **Preview updates** - `PreviewAssetService.replace_preview` writes the new
asset, `MetadataSyncService` persists the JSON metadata, and
`scanner.update_preview_in_cache` mirrors the change. The handler returns
the static URL produced by `config.get_preview_static_url`, keeping browser
clients in lockstep with disk state.
3. **Download progress** - `DownloadCoordinator.schedule_download` generates the
download identifier, registers a WebSocket progress callback, and caches the
latest numeric progress via `WebSocketManager`. Both `download_model`
responses and `/download-progress/{id}` polling read from the same cache to
guarantee consistent progress reporting across transports.
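Point 3 hinges on there being a single shared progress store. A sketch of the idea, assuming a plain dict keyed by download id (the real cache lives on `WebSocketManager`):
```python
import uuid

class ProgressCache:
    """Single source of truth read by WebSocket broadcasts and polling alike."""

    def __init__(self) -> None:
        self._progress: dict[str, float] = {}

    def start(self) -> str:
        download_id = uuid.uuid4().hex
        self._progress[download_id] = 0.0
        return download_id

    def update(self, download_id: str, value: float) -> None:
        # The WebSocket progress callback writes here; the polling endpoint
        # reads the same dict, so both transports report identical numbers.
        self._progress[download_id] = value

    def snapshot(self, download_id: str) -> float | None:
        return self._progress.get(download_id)
```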
## Extending the stack
To add a new shared route:
1. Declare it in `COMMON_ROUTE_DEFINITIONS` using a unique handler name.
2. Implement the corresponding coroutine on one of the handlers inside
`ModelHandlerSet` (or introduce a new handler class when the concern does not
fit existing ones).
3. Inject additional dependencies in `BaseModelRoutes._create_handler_set` by
wiring services or use cases through the constructor parameters.
Model-specific routes should continue to be registered inside the subclass
implementation of `setup_specific_routes`, reusing the shared registrar where
possible.
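Putting the three steps together, a hypothetical endpoint might look like this. The route-definition shape and the injected `usage_service` are assumptions for illustration, not existing code:
```python
from aiohttp import web

# 1. Declare the route. The definition shape below is an assumption; check
#    COMMON_ROUTE_DEFINITIONS for the structure the registrar actually uses.
NEW_ROUTE = {"method": "GET",
             "path": "/api/lm/{prefix}/usage-stats",
             "handler_name": "get_usage_stats"}

# 2. Implement the coroutine on an existing handler (hypothetical service).
class ModelQueryHandler:
    def __init__(self, usage_service):
        self._usage_service = usage_service  # injected in step 3

    async def get_usage_stats(self, request: web.Request) -> web.Response:
        stats = await self._usage_service.collect()
        # Query handlers wrap successful output in {"success": True}.
        return web.json_response({"success": True, "stats": stats})
```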


@@ -0,0 +1,34 @@
# Multi-Library Management for Standalone Mode
## Requirements Summary
- **Independent libraries**: In standalone mode, users can maintain multiple libraries, where each library represents a distinct set of model folders (LoRAs, checkpoints, embeddings, etc.). Only one library is active at any given time, but users need a fast way to switch between them.
- **Library-specific settings**: The fields that vary per library are `folder_paths`, `default_lora_root`, `default_checkpoint_root`, and `default_embedding_root` inside `settings.json`.
- **Persistent caches**: Every library must have its own SQLite persistent model cache so that metadata generated for one library does not leak into another.
- **Backward compatibility**: Existing single-library setups should continue to work. When no multi-library configuration is provided, the application should behave exactly as before.
## Proposed Design
1. **Library registry**
- Extend the standalone configuration to hold a list of libraries, each identified by a unique name.
- Each entry stores the folder path configuration plus any library-scoped metadata (e.g. creation time, display name).
- The active library key is stored separately to allow quick switching without rewriting the full config.
2. **Settings management**
- Update `settings_manager` to load and persist the library registry. When a library is activated, hydrate the in-memory settings object with that library's folder configuration.
- Provide helper methods for creating, renaming, and deleting libraries, ensuring validation for duplicate names and path collisions.
- Continue writing the active library settings to `settings.json` for compatibility, while storing the registry in a new section such as `libraries`.
3. **Persistent model cache**
- Derive the SQLite file path from the active library, e.g. `model_cache_<library>.sqlite` or a nested directory structure like `model_cache/<library>/models.sqlite` (see the sketch after this list).
- Update `PersistentModelCache` so it resolves the database path dynamically whenever the active library changes. Ensure connections are closed before switching to avoid locking issues.
- Migrate existing single cache files by treating them as the default library's cache.
4. **Model scanning workflow**
- Modify `ModelScanner` and related services to react to library switches by clearing in-memory caches, re-reading folder paths, and rehydrating metadata from the library-specific SQLite cache.
- Provide API endpoints in standalone mode to list libraries, activate one, and trigger a rescan.
5. **UI/UX considerations**
- In the standalone UI, introduce a library selector component that surfaces available libraries and offers quick switching.
- Offer feedback when switching libraries (e.g. spinner while rescanning) and guard destructive actions with confirmation prompts.
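A sketch of the cache-path derivation and switch-over from points 2 and 3, using the nested directory layout (the helper and class internals are illustrative, not the actual `PersistentModelCache` API):
```python
import sqlite3
from pathlib import Path

def cache_path_for_library(cache_root: Path, library: str) -> Path:
    # model_cache/<library>/models.sqlite: one database per library, so
    # metadata generated for one library never leaks into another.
    return cache_root / "model_cache" / library / "models.sqlite"

class PersistentModelCache:
    def __init__(self, cache_root: Path, library: str) -> None:
        self._cache_root = cache_root
        self._conn: sqlite3.Connection | None = None
        self.switch_library(library)

    def switch_library(self, library: str) -> None:
        # Close the previous connection first to avoid SQLite locking issues.
        if self._conn is not None:
            self._conn.close()
        path = cache_path_for_library(self._cache_root, library)
        path.parent.mkdir(parents=True, exist_ok=True)
        self._conn = sqlite3.connect(path)
```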
## Implementation Notes
- **Data migration**: On startup, detect whether the old `settings.json` structure is present. If so, create a default library entry from the current folder paths and point the active library at it (a sketch follows this list).
- **Thread safety**: Ensure that any long-running scans are cancelled or awaited before switching libraries to prevent race conditions in cache writes.
- **Testing**: Add unit tests for the settings manager to cover library CRUD operations and cache path resolution. Include integration tests that simulate switching libraries and verifying that the correct models are loaded.
- **Documentation**: Update user guides to explain how to define libraries, switch between them, and where the new cache files are stored.
- **Extensibility**: Keep the design open to future per-library settings (e.g. auto-refresh intervals, metadata overrides) by storing library data as objects instead of flat maps.
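For the data-migration note, the startup check can stay small. A sketch assuming the registry section proposed above (`libraries` plus a separately stored active-library key, whose exact name is an assumption):
```python
def migrate_settings(settings: dict) -> dict:
    """Wrap a legacy single-library settings.json in a library registry."""
    if "libraries" in settings:
        return settings  # already migrated
    legacy_keys = ("folder_paths", "default_lora_root",
                   "default_checkpoint_root", "default_embedding_root")
    settings["libraries"] = {
        "default": {key: settings.get(key) for key in legacy_keys}
    }
    settings["active_library"] = "default"  # key name is an assumption
    return settings
```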


@@ -0,0 +1,89 @@
# Recipe route architecture
The recipe routing stack now mirrors the modular model route design. HTTP
bindings, controller wiring, handler orchestration, and business rules live in
separate layers so new behaviours can be added without re-threading the entire
feature. The diagram below outlines the flow for a typical request:
```mermaid
graph TD
subgraph HTTP
A[RecipeRouteRegistrar] -->|binds| B[RecipeRoutes controller]
end
subgraph Application
B --> C[RecipeHandlerSet]
C --> D1[Handlers]
D1 --> E1[Use cases]
E1 --> F1[Services / scanners]
end
subgraph Side Effects
F1 --> G1[Cache & fingerprint index]
F1 --> G2[Metadata files]
F1 --> G3[Temporary shares]
end
```
## Layer responsibilities
| Layer | Module(s) | Responsibility |
| --- | --- | --- |
| Registrar | `py/routes/recipe_route_registrar.py` | Declarative list of every recipe endpoint and helper methods that bind them to an `aiohttp` application. |
| Controller | `py/routes/base_recipe_routes.py`, `py/routes/recipe_routes.py` | Lazily resolves scanners/clients from the service registry, wires shared templates/i18n, instantiates `RecipeHandlerSet`, and exposes a `{handler_name: coroutine}` mapping for the registrar. |
| Handler set | `py/routes/handlers/recipe_handlers.py` | Thin HTTP adapters grouped by concern (page view, listings, queries, mutations, sharing). They normalise responses and translate service exceptions into HTTP status codes. |
| Services & scanners | `py/services/recipes/*.py`, `py/services/recipe_scanner.py`, `py/services/service_registry.py` | Concrete business logic: metadata parsing, persistence, sharing, fingerprint/index maintenance, and cache refresh. |
## Handler responsibilities & invariants
`RecipeHandlerSet` flattens purpose-built handler objects into the callables the
registrar binds. Each handler is responsible for a narrow concern and enforces a
set of invariants before returning:
| Handler | Key endpoints | Collaborators | Contracts |
| --- | --- | --- | --- |
| `RecipePageView` | `/loras/recipes` | `SettingsManager`, `server_i18n`, Jinja environment, recipe scanner getter | Template rendered with `is_initializing` flag when caches are still warming; i18n filter registered exactly once per environment instance. |
| `RecipeListingHandler` | `/api/lm/recipes`, `/api/lm/recipe/{id}` | `recipe_scanner.get_paginated_data`, `recipe_scanner.get_recipe_by_id` | Listings respect pagination and search filters; every item receives a `file_url` fallback even when metadata is incomplete; missing recipes become HTTP 404. |
| `RecipeQueryHandler` | Tag/base-model stats, syntax, LoRA lookups | Recipe scanner cache, `format_recipe_file_url` helper | Cache snapshots are reused without forcing refresh; duplicate lookups collapse groups by fingerprint; syntax lookups return helpful errors when LoRAs are absent. |
| `RecipeManagementHandler` | Save, update, reconnect, bulk delete, widget ingest | `RecipePersistenceService`, `RecipeAnalysisService`, recipe scanner | Persistence results propagate HTTP status codes; fingerprint/index updates flow through the scanner before returning; validation errors surface as HTTP 400 without touching disk. |
| `RecipeAnalysisHandler` | Uploaded/local/remote analysis | `RecipeAnalysisService`, `civitai_client`, recipe scanner | Unsupported content types map to HTTP 400; download errors (`RecipeDownloadError`) are not retried; every response includes a `loras` array for client compatibility. |
| `RecipeSharingHandler` | Share + download | `RecipeSharingService`, recipe scanner | Share responses provide a stable download URL and filename; expired shares surface as HTTP 404; downloads stream via `web.FileResponse` with attachment headers. |
## Use case boundaries
The dedicated services encapsulate long-running work so handlers stay thin.
| Use case | Entry point | Dependencies | Guarantees |
| --- | --- | --- | --- |
| `RecipeAnalysisService` | `analyze_uploaded_image`, `analyze_remote_image`, `analyze_local_image`, `analyze_widget_metadata` | `ExifUtils`, `RecipeParserFactory`, downloader factory, optional metadata collector/processor | Normalises missing/invalid payloads into `RecipeValidationError`; generates consistent fingerprint data to keep duplicate detection stable; temporary files are cleaned up after every analysis path. |
| `RecipePersistenceService` | `save_recipe`, `delete_recipe`, `update_recipe`, `reconnect_lora`, `bulk_delete`, `save_recipe_from_widget` | `ExifUtils`, recipe scanner, card preview sizing constants | Writes images/JSON metadata atomically; updates scanner caches and hash indices before returning; recalculates fingerprints whenever LoRA assignments change. |
| `RecipeSharingService` | `share_recipe`, `prepare_download` | `tempfile`, recipe scanner | Copies originals to TTL-managed temp files; metadata lookups re-use the scanner; expired shares trigger cleanup and `RecipeNotFoundError`. |
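The TTL-managed share flow can be pictured as follows. Only the behaviour (copy to a temp file, expire, raise `RecipeNotFoundError`) comes from the table above; the expiry window and bookkeeping are illustrative:
```python
import os
import shutil
import tempfile
import time
from pathlib import Path

class RecipeNotFoundError(Exception):
    pass

class ShareStore:
    """TTL-managed temp copies backing share/download (sketch only)."""

    def __init__(self, ttl_seconds: float = 600.0) -> None:
        self._ttl = ttl_seconds
        self._shares: dict[str, tuple[Path, float]] = {}

    def share(self, share_id: str, original: Path) -> Path:
        # Copy the original so later edits or deletes cannot break downloads.
        fd, name = tempfile.mkstemp(suffix=original.suffix)
        os.close(fd)
        shutil.copyfile(original, name)
        tmp = Path(name)
        self._shares[share_id] = (tmp, time.monotonic() + self._ttl)
        return tmp

    def prepare_download(self, share_id: str) -> Path:
        entry = self._shares.get(share_id)
        if entry is not None and time.monotonic() <= entry[1]:
            return entry[0]
        # Expired or unknown shares are cleaned up and surfaced as not-found.
        if entry is not None:
            entry[0].unlink(missing_ok=True)
            del self._shares[share_id]
        raise RecipeNotFoundError(share_id)
```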
## Maintaining critical invariants
* **Cache updates.** Mutations (`save`, `delete`, `bulk_delete`, `update`) call
back into the recipe scanner to mutate the in-memory cache and fingerprint
index before returning a response. Tests assert that these methods are invoked
even when stubbing persistence.
* **Fingerprint management.** `RecipePersistenceService` recomputes
  fingerprints whenever LoRA metadata changes, and duplicate lookups use those
  fingerprints to group recipes (sketched after this list). Handlers bubble
  the resulting IDs so clients can merge duplicates without an extra fetch.
* **Metadata synchronisation.** Saving or reconnecting a recipe updates the
JSON sidecar, refreshes embedded metadata via `ExifUtils`, and instructs the
scanner to resort its cache. Sharing relies on this metadata to generate
filenames and ensure downloads stay in sync with on-disk state.
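A minimal illustration of why fingerprint grouping stays stable. The real hashing scheme lives in the persistence service; this sketch only shows the order-independence property:
```python
import hashlib
from collections import defaultdict

def recipe_fingerprint(lora_hashes: list[str]) -> str:
    # Sorting makes the fingerprint independent of LoRA ordering, so the same
    # combination always lands in the same duplicate group.
    return hashlib.sha256("|".join(sorted(lora_hashes)).encode()).hexdigest()

def group_duplicates(recipes: dict[str, list[str]]) -> dict[str, list[str]]:
    groups: dict[str, list[str]] = defaultdict(list)
    for recipe_id, hashes in recipes.items():
        groups[recipe_fingerprint(hashes)].append(recipe_id)
    return {fp: ids for fp, ids in groups.items() if len(ids) > 1}
```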
## Extending the stack
1. Declare the new endpoint in `ROUTE_DEFINITIONS` with a unique handler name.
2. Implement the coroutine on an existing handler or introduce a new handler
class inside `py/routes/handlers/recipe_handlers.py` when the concern does
not fit existing ones.
3. Wire additional collaborators inside
`BaseRecipeRoutes._create_handler_set` (inject new services or factories) and
expose helper getters on the handler owner if the handler needs to share
utilities.
Integration tests in `tests/routes/test_recipe_routes.py` exercise the listing,
mutation, analysis-error, and sharing paths end-to-end, ensuring the controller
and handler wiring remains valid as new capabilities are added.


@@ -0,0 +1,51 @@
# Frontend DOM Fixture Strategy
This guide outlines how to reproduce the markup emitted by the server-side Jinja templates while running Vitest in jsdom. The aim is to make it straightforward to write integration-style unit tests for managers and UI helpers without having to duplicate template fragments inline.
## Loading Template Markup
Vitest executes inside Node, so we can read the same HTML templates that ship with the extension:
1. Use the helper utilities from `tests/frontend/utils/domFixtures.js` to read files under the `templates/` directory.
2. Mount the returned markup into `document.body` (or any custom container) before importing the module under test so its query selectors resolve correctly.
```js
import { renderTemplate } from '../utils/domFixtures.js'; // adjust the relative path to your spec
beforeEach(() => {
renderTemplate('loras.html', {
dataset: { page: 'loras' }
});
});
```
The helper ensures the dataset is applied to the container, which mirrors how the production templates set `data-page`.
## Working with Partial Components
Many features are implemented as template partials located under `templates/components/`. When a test only needs a fragment (for example, the progress panel or context menu markup), load the component file directly:
```js
const container = renderTemplate('components/progress_panel.html');
const progressPanel = container.querySelector('#progress-panel');
```
This pattern avoids hand-written fixture strings and keeps the tests aligned with the actual markup.
## Resetting Between Tests
The shared Vitest setup clears `document.body` and storage APIs before each test. If a suite adds additional DOM nodes outside of the body or needs to reset custom attributes mid-test, use `resetDom()` exported from `domFixtures.js`.
```js
import { resetDom } from '../utils/domFixtures.js';
afterEach(() => {
resetDom();
});
```
## Future Enhancements
- Provide typed helpers for injecting mock script tags (e.g., replicating ComfyUI globals).
- Compose higher-level fixtures that mimic specific pages (loras, checkpoints, recipes) once those managers receive dedicated suites.


@@ -0,0 +1,44 @@
# LoRA & Checkpoints Filtering/Sorting Test Matrix
This matrix captures the scenarios that Phase 3 frontend tests should cover for the LoRA and Checkpoint managers. It focuses on how search, filter, sort, and duplicate badge toggles interact so future specs can share fixtures and expectations.
## Scope
- **Components**: `PageControls`, `FilterManager`, `SearchManager`, and `ModelDuplicatesManager` wiring invoked through `CheckpointsPageManager` and `LorasPageManager`.
- **Templates**: `templates/loras.html` and `templates/checkpoints.html` along with shared filter panel and toolbar partials.
- **APIs**: Requests issued through `baseModelApi.fetchModels` (via `resetAndReload`/`refreshModels`) and duplicates badge updates.
## Shared Setup Considerations
1. Render full page templates using `renderLorasPage` / `renderCheckpointsPage` helpers before importing modules so DOM queries resolve.
2. Stub storage helpers (`getStorageItem`, `setStorageItem`, `getSessionItem`, `setSessionItem`) to observe persistence behavior without mutating real storage.
3. Mock `sidebarManager` to capture refresh calls triggered after sort/filter actions.
4. Provide fake API implementations exposing `resetAndReload`, `refreshModels`, `fetchFromCivitai`, `toggleBulkMode`, and `clearCustomFilter` so control events remain asynchronous but deterministic.
5. Supply a minimal `ModelDuplicatesManager` mock exposing `toggleDuplicateMode`, `checkDuplicatesCount`, and `updateDuplicatesBadgeAfterRefresh` to validate duplicate badge wiring.
## Scenario Matrix
| ID | Feature | Scenario | LoRAs Expectations | Checkpoints Expectations | Notes |
| --- | --- | --- | --- | --- | --- |
| F-01 | Search filter | Typing a query updates `pageState.filters.search`, persists to session, and triggers `resetAndReload` on submit | Validate `SearchManager` writes query and reloads via API stub; confirm LoRA cards pass query downstream | Same as LoRAs | Cover `enter` press and clicking search icon |
| F-02 | Tag filter | Selecting a tag chip adds it to filters, applies active styling, and reloads results | Tag stored under `filters.tags`; `FilterManager.applyFilters` persists and triggers `resetAndReload(true)` | Same; ensure base model tag set is scoped to checkpoints dataset | Include removal path |
| F-03 | Base model filter | Toggling base model checkboxes updates `filters.baseModel`, persists, and reloads | Ensure only LoRA-supported models show; toggle multi-select | Ensure SDXL/Flux base models appear as expected | Capture UI state restored from storage on next init |
| F-04 | Favorites-only | Clicking favorites toggle updates session flag and calls `resetAndReload(true)` | Button gains `.active` class and API called | Same | Verify duplicates badge refresh when active |
| F-05 | Sort selection | Changing sort select saves preference (legacy + new format) and reloads | Confirm `PageControls.saveSortPreference` invoked with option and API called | Same with checkpoints-specific defaults | Cover `convertLegacySortFormat` branch |
| F-06 | Filter persistence | Re-initializing manager loads stored filters/sort and updates DOM | Filters pre-populate chips/checkboxes; favorites state restored | Same | Requires simulating repeated construction |
| F-07 | Combined filters | Applying search + tag + base model yields aggregated query params for fetch | Assert API receives merged filter payload | Same | Validate toast messaging for active filters |
| F-08 | Clearing filters | Using "Clear filters" resets state, storage, and reloads list | `FilterManager.clearFilters` empties `filters`, removes active class, shows toast | Same | Ensure favorites-only toggle unaffected |
| F-09 | Duplicate badge toggle | Pressing "Find duplicates" toggles duplicate mode and updates badge counts post-refresh | `ModelDuplicatesManager.toggleDuplicateMode` invoked and badge refresh called after API rebuild | Same plus checkpoint-specific duplicate badge dataset | Connects to future duplicate-specific specs |
| F-10 | Bulk actions menu | Opening bulk dropdown keeps filters intact and closes on outside click | Validate dropdown class toggling and no unintended reload | Same | Guard against regression when dropdown interacts with filters |
## Automation Coverage Status
- ✅ F-01 Search filter, F-02 Tag filter, F-03 Base model filter, F-04 Favorites-only toggle, F-05 Sort selection, and F-09 Duplicate badge toggle are covered by `tests/frontend/components/pageControls.filtering.test.js` for both LoRA and checkpoint pages.
- ⏳ F-06 Filter persistence, F-07 Combined filters, F-08 Clearing filters, and F-10 Bulk actions remain to be automated alongside upcoming bulk mode refinements.
## Coverage Gaps & Follow-Ups
- Write Vitest suites that exercise the matrix for both managers, sharing fixtures through page helpers to avoid duplication.
- Capture API parameter assertions by inspecting `baseModelApi.fetchModels` mocks rather than relying solely on state mutations.
- Add regression cases for legacy storage migrations (old filter keys) once fixtures exist for older payloads.
- Extend duplicate badge coverage with scenarios where `checkDuplicatesCount` signals zero duplicates versus pending calculations.


@@ -0,0 +1,33 @@
# Frontend Automation Testing Roadmap
This roadmap tracks the planned rollout of automated testing for the ComfyUI LoRA Manager frontend. Each phase builds on the infrastructure introduced in this change set and records progress so future contributors can quickly identify the next tasks.
## Phase Overview
| Phase | Goal | Primary Focus | Status | Notes |
| --- | --- | --- | --- | --- |
| Phase 0 | Establish baseline tooling | Add Node test runner, jsdom environment, and seed smoke tests | ✅ Complete | Vitest + jsdom configured, example state tests committed |
| Phase 1 | Cover state management logic | Unit test selectors, derived data helpers, and storage utilities under `static/js/state` and `static/js/utils` | ✅ Complete | Storage helpers and state selectors now exercised via deterministic suites |
| Phase 2 | Test AppCore orchestration | Simulate page bootstrapping, infinite scroll hooks, and manager registration using JSDOM DOM fixtures | ✅ Complete | AppCore initialization + page feature suites now validate manager wiring, infinite scroll hooks, and onboarding gating |
| Phase 3 | Validate page-specific managers | Add focused suites for `loras`, `checkpoints`, `embeddings`, and `recipes` managers covering filtering, sorting, and bulk actions | ✅ Complete | LoRA/checkpoint suites expanded; embeddings + recipes managers now covered with initialization, filtering, and duplicate workflows |
| Phase 4 | Interaction-level regression tests | Exercise template fragments, modals, and menus to ensure UI wiring remains intact | ✅ Complete | Vitest DOM suites cover NSFW selector, recipe modal editing, and global context menus |
| Phase 5 | Continuous integration & coverage | Integrate frontend tests into CI workflow and track coverage metrics | ✅ Complete | CI workflow runs Vitest and aggregates V8 coverage into `coverage/frontend` via a dedicated script |
## Next Steps Checklist
- [x] Expand unit tests for `storageHelpers` covering migrations and namespace behavior.
- [x] Document DOM fixture strategy for reproducing template structures in tests.
- [x] Prototype AppCore initialization test that verifies manager bootstrapping with stubbed dependencies.
- [x] Add AppCore page feature suite exercising context menu creation and infinite scroll registration via DOM fixtures.
- [x] Extend AppCore orchestration tests to cover manager wiring, bulk menu setup, and onboarding gating scenarios.
- [x] Add interaction regression suites for context menus and recipe modals to complete Phase 4.
- [x] Evaluate integrating coverage reporting once test surface grows (> 20 specs).
- [x] Create shared fixtures for the loras and checkpoints pages once dedicated manager suites are added.
- [x] Draft focused test matrix for loras/checkpoints manager filtering and sorting paths ahead of Phase 3.
- [x] Implement LoRAs manager filtering/sorting specs for scenarios F-01 through F-05 & F-09; queue remaining edge cases after duplicate/bulk flows stabilize.
- [x] Implement checkpoints manager filtering/sorting specs for scenarios F-01 through F-05 & F-09; cover remaining paths alongside bulk action work.
- [x] Implement checkpoints page manager smoke tests covering initialization and duplicate badge wiring.
- [x] Outline focused checkpoints scenarios (filtering, sorting, duplicate badge toggles) to feed into the shared test matrix.
- [ ] Add duplicate badge regression coverage for zero/pending states after API refreshes.
Maintaining this roadmap alongside code changes will make it easier to append new automated test tasks and update their progress.

docs/library-switching.md (new file, 28 lines)

@@ -0,0 +1,28 @@
# Library Switching and Preview Routes
Library switching no longer requires restarting the backend. The preview
thumbnails shown in the UI are now served through a dynamic endpoint that
resolves files against the folders registered for the active library at request
time. This allows the multi-library flow to update model roots without touching
the aiohttp router, so previews remain available immediately after a switch.
## How the dynamic preview endpoint works
* `config.get_preview_static_url()` now returns `/api/lm/previews?path=<encoded>`
for any preview path. The raw filesystem location is URL encoded so that it
can be passed through the query string without leaking directory structure in
the route itself.【F:py/config.py†L398-L404】
* `PreviewRoutes` exposes the `/api/lm/previews` handler which validates the
decoded path against the directories registered for the current library. The
request is rejected if it falls outside those roots or if the file does not
exist.【F:py/routes/preview_routes.py†L5-L21】【F:py/routes/handlers/preview_handlers.py†L9-L48】
* `Config` keeps an up-to-date cache of allowed preview roots. Every time a
library is applied the cache is rebuilt using the declared LoRA, checkpoint
and embedding directories (including symlink targets). The validation logic
checks preview requests against this cache.【F:py/config.py†L51-L68】【F:py/config.py†L180-L248】【F:py/config.py†L332-L346】
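A sketch of the encode/validate round trip described above. The helper names are illustrative; the real logic lives in `py/config.py` and `py/routes/handlers/preview_handlers.py`:
```python
from pathlib import Path
from urllib.parse import quote, unquote

def preview_url(preview_path: str) -> str:
    # Mirrors the idea behind config.get_preview_static_url(): encode the raw
    # filesystem path into the query string of the dynamic endpoint.
    return f"/api/lm/previews?path={quote(preview_path, safe='')}"

def resolve_preview(encoded: str, allowed_roots: list[Path]) -> Path:
    candidate = Path(unquote(encoded)).resolve()
    in_roots = any(candidate.is_relative_to(root.resolve())
                   for root in allowed_roots)
    if not in_roots or not candidate.is_file():
        # The real handler rejects the request here instead of raising.
        raise FileNotFoundError(candidate)
    return candidate
```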
Both the ComfyUI runtime (`LoraManager.add_routes`) and the standalone launcher
(`StandaloneLoraManager.add_routes`) register the new preview routes instead of
mounting a static directory per root. Switching libraries therefore works
without restarting the application, and preview URLs generated before or after a
switch continue to resolve correctly.【F:py/lora_manager.py†L21-L82】【F:standalone.py†L302-L315】


@@ -31,7 +31,8 @@
"japanese": "日本語",
"korean": "한국어",
"french": "Français",
"spanish": "Español"
"spanish": "Español",
"Hebrew": "עברית"
},
"fileSize": {
"zero": "0 Bytes",
@@ -122,6 +123,20 @@
"noRemoteImagesAvailable": "Keine Remote-Beispielbilder für dieses Modell auf Civitai verfügbar"
}
},
"globalContextMenu": {
"downloadExampleImages": {
"label": "Beispielbilder herunterladen",
"missingPath": "Bitte legen Sie einen Speicherort fest, bevor Sie Beispielbilder herunterladen.",
"unavailable": "Beispielbild-Downloads sind noch nicht verfügbar. Versuchen Sie es erneut, nachdem die Seite vollständig geladen ist."
},
"cleanupExampleImages": {
"label": "Beispielbild-Ordner bereinigen",
"success": "{count} Ordner wurden in den Papierkorb verschoben",
"none": "Keine Beispielbild-Ordner mussten bereinigt werden",
"partial": "Bereinigung abgeschlossen, {failures} Ordner übersprungen",
"error": "Fehler beim Bereinigen der Beispielbild-Ordner: {message}"
}
},
"header": {
"appTitle": "LoRA Manager",
"navigation": {
@@ -173,6 +188,12 @@
"civitaiApiKey": "Civitai API Key",
"civitaiApiKeyPlaceholder": "Geben Sie Ihren Civitai API Key ein",
"civitaiApiKeyHelp": "Wird für die Authentifizierung beim Herunterladen von Modellen von Civitai verwendet",
"openSettingsFileLocation": {
"label": "Einstellungsordner öffnen",
"tooltip": "Den Ordner mit der settings.json öffnen",
"success": "Einstellungsordner geöffnet",
"failed": "Einstellungsordner konnte nicht geöffnet werden"
},
"sections": {
"contentFiltering": "Inhaltsfilterung",
"videoSettings": "Video-Einstellungen",
@@ -220,6 +241,10 @@
}
},
"folderSettings": {
"activeLibrary": "Aktive Bibliothek",
"activeLibraryHelp": "Zwischen den konfigurierten Bibliotheken wechseln, um die Standardordner zu aktualisieren. Eine Änderung der Auswahl lädt die Seite neu.",
"loadingLibraries": "Bibliotheken werden geladen...",
"noLibraries": "Keine Bibliotheken konfiguriert",
"defaultLoraRoot": "Standard-LoRA-Stammordner",
"defaultLoraRootHelp": "Legen Sie den Standard-LoRA-Stammordner für Downloads, Importe und Verschiebungen fest",
"defaultCheckpointRoot": "Standard-Checkpoint-Stammordner",
@@ -368,6 +393,7 @@
"viewSelected": "Auswahl anzeigen",
"addTags": "Allen Tags hinzufügen",
"setBaseModel": "Basis-Modell für alle festlegen",
"setContentRating": "Inhaltsbewertung für alle festlegen",
"copyAll": "Alle Syntax kopieren",
"refreshAll": "Alle Metadaten aktualisieren",
"moveAll": "Alle in Ordner verschieben",
@@ -592,6 +618,7 @@
"contentRating": {
"title": "Inhaltsbewertung festlegen",
"current": "Aktuell",
"multiple": "Mehrere Werte",
"levels": {
"pg": "PG",
"pg13": "PG13",
@@ -727,6 +754,7 @@
"strengthMin": "Stärke Min",
"strengthMax": "Stärke Max",
"strength": "Stärke",
"clipStrength": "Clip-Stärke",
"clipSkip": "Clip Skip",
"valuePlaceholder": "Wert",
"add": "Hinzufügen"
@@ -1069,6 +1097,10 @@
"bulkBaseModelUpdateSuccess": "Basis-Modell erfolgreich für {count} Modell(e) aktualisiert",
"bulkBaseModelUpdatePartial": "{success} Modelle aktualisiert, {failed} fehlgeschlagen",
"bulkBaseModelUpdateFailed": "Aktualisierung des Basis-Modells für ausgewählte Modelle fehlgeschlagen",
"bulkContentRatingUpdating": "Inhaltsbewertung wird für {count} Modell(e) aktualisiert...",
"bulkContentRatingSet": "Inhaltsbewertung auf {level} für {count} Modell(e) gesetzt",
"bulkContentRatingPartial": "Inhaltsbewertung auf {level} für {success} Modell(e) gesetzt, {failed} fehlgeschlagen",
"bulkContentRatingFailed": "Inhaltsbewertung für ausgewählte Modelle konnte nicht aktualisiert werden",
"invalidCharactersRemoved": "Ungültige Zeichen aus Dateiname entfernt",
"filenameCannotBeEmpty": "Dateiname darf nicht leer sein",
"renameFailed": "Fehler beim Umbenennen der Datei: {message}",
@@ -1103,6 +1135,8 @@
"compactModeToggled": "Kompakt-Modus {state}",
"settingSaveFailed": "Fehler beim Speichern der Einstellung: {message}",
"displayDensitySet": "Anzeige-Dichte auf {density} gesetzt",
"libraryLoadFailed": "Failed to load libraries: {message}",
"libraryActivateFailed": "Failed to activate library: {message}",
"languageChangeFailed": "Fehler beim Ändern der Sprache: {message}",
"cacheCleared": "Cache-Dateien wurden erfolgreich gelöscht. Cache wird bei der nächsten Aktion neu aufgebaut.",
"cacheClearFailed": "Fehler beim Löschen des Caches: {error}",
@@ -1222,6 +1256,12 @@
"refreshNow": "Jetzt aktualisieren",
"refreshingIn": "Aktualisierung in",
"seconds": "Sekunden"
},
"communitySupport": {
"title": "Keep LoRA Manager Thriving with Your Support ❤️",
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
"supportCta": "Support on Ko-fi",
"learnMore": "LM Civitai Extension Tutorial"
}
}
}


@@ -31,7 +31,8 @@
"japanese": "日本語",
"korean": "한국어",
"french": "Français",
"spanish": "Español"
"spanish": "Español",
"Hebrew": "עברית"
},
"fileSize": {
"zero": "0 Bytes",
@@ -122,6 +123,20 @@
"noRemoteImagesAvailable": "No remote example images available for this model on Civitai"
}
},
"globalContextMenu": {
"downloadExampleImages": {
"label": "Download example images",
"missingPath": "Set a download location before downloading example images.",
"unavailable": "Example image downloads aren't available yet. Try again after the page finishes loading."
},
"cleanupExampleImages": {
"label": "Clean up example image folders",
"success": "Moved {count} folder(s) to the deleted folder",
"none": "No example image folders needed cleanup",
"partial": "Cleanup completed with {failures} folder(s) skipped",
"error": "Failed to clean example image folders: {message}"
}
},
"header": {
"appTitle": "LoRA Manager",
"navigation": {
@@ -173,6 +188,12 @@
"civitaiApiKey": "Civitai API Key",
"civitaiApiKeyPlaceholder": "Enter your Civitai API key",
"civitaiApiKeyHelp": "Used for authentication when downloading models from Civitai",
"openSettingsFileLocation": {
"label": "Open settings folder",
"tooltip": "Open the folder containing settings.json",
"success": "Opened settings.json folder",
"failed": "Failed to open settings.json folder"
},
"sections": {
"contentFiltering": "Content Filtering",
"videoSettings": "Video Settings",
@@ -220,6 +241,10 @@
}
},
"folderSettings": {
"activeLibrary": "Active Library",
"activeLibraryHelp": "Switch between configured libraries to update default folders. Changing the selection reloads the page.",
"loadingLibraries": "Loading libraries...",
"noLibraries": "No libraries configured",
"defaultLoraRoot": "Default LoRA Root",
"defaultLoraRootHelp": "Set the default LoRA root directory for downloads, imports and moves",
"defaultCheckpointRoot": "Default Checkpoint Root",
@@ -368,6 +393,7 @@
"viewSelected": "View Selected",
"addTags": "Add Tags to All",
"setBaseModel": "Set Base Model for All",
"setContentRating": "Set Content Rating for All",
"copyAll": "Copy All Syntax",
"refreshAll": "Refresh All Metadata",
"moveAll": "Move All to Folder",
@@ -592,6 +618,7 @@
"contentRating": {
"title": "Set Content Rating",
"current": "Current",
"multiple": "Multiple values",
"levels": {
"pg": "PG",
"pg13": "PG13",
@@ -727,6 +754,7 @@
"strengthMin": "Strength Min",
"strengthMax": "Strength Max",
"strength": "Strength",
"clipStrength": "Clip Strength",
"clipSkip": "Clip Skip",
"valuePlaceholder": "Value",
"add": "Add"
@@ -1069,6 +1097,10 @@
"bulkBaseModelUpdateSuccess": "Successfully updated base model for {count} model(s)",
"bulkBaseModelUpdatePartial": "Updated {success} model(s), failed {failed} model(s)",
"bulkBaseModelUpdateFailed": "Failed to update base model for selected models",
"bulkContentRatingUpdating": "Updating content rating for {count} model(s)...",
"bulkContentRatingSet": "Set content rating to {level} for {count} model(s)",
"bulkContentRatingPartial": "Set content rating to {level} for {success} model(s), {failed} failed",
"bulkContentRatingFailed": "Failed to update content rating for selected models",
"invalidCharactersRemoved": "Invalid characters removed from filename",
"filenameCannotBeEmpty": "File name cannot be empty",
"renameFailed": "Failed to rename file: {message}",
@@ -1103,6 +1135,8 @@
"compactModeToggled": "Compact Mode {state}",
"settingSaveFailed": "Failed to save setting: {message}",
"displayDensitySet": "Display Density set to {density}",
"libraryLoadFailed": "Failed to load libraries: {message}",
"libraryActivateFailed": "Failed to activate library: {message}",
"languageChangeFailed": "Failed to change language: {message}",
"cacheCleared": "Cache files have been cleared successfully. Cache will rebuild on next action.",
"cacheClearFailed": "Failed to clear cache: {error}",
@@ -1222,6 +1256,12 @@
"refreshNow": "Refresh Now",
"refreshingIn": "Refreshing in",
"seconds": "seconds"
},
"communitySupport": {
"title": "Keep LoRA Manager Thriving with Your Support ❤️",
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
"supportCta": "Support on Ko-fi",
"learnMore": "LM Civitai Extension Tutorial"
}
}
}


@@ -31,7 +31,8 @@
"japanese": "日本語",
"korean": "한국어",
"french": "Français",
"spanish": "Español"
"spanish": "Español",
"Hebrew": "עברית"
},
"fileSize": {
"zero": "0 Bytes",
@@ -122,6 +123,20 @@
"noRemoteImagesAvailable": "No hay imágenes de ejemplo remotas disponibles para este modelo en Civitai"
}
},
"globalContextMenu": {
"downloadExampleImages": {
"label": "Descargar imágenes de ejemplo",
"missingPath": "Establece una ubicación de descarga antes de descargar imágenes de ejemplo.",
"unavailable": "Las descargas de imágenes de ejemplo aún no están disponibles. Intenta de nuevo después de que la página termine de cargar."
},
"cleanupExampleImages": {
"label": "Limpiar carpetas de imágenes de ejemplo",
"success": "Se movieron {count} carpeta(s) a la carpeta de eliminados",
"none": "No hay carpetas de imágenes de ejemplo que necesiten limpieza",
"partial": "Limpieza completada con {failures} carpeta(s) omitidas",
"error": "No se pudieron limpiar las carpetas de imágenes de ejemplo: {message}"
}
},
"header": {
"appTitle": "LoRA Manager",
"navigation": {
@@ -173,6 +188,12 @@
"civitaiApiKey": "Clave API de Civitai",
"civitaiApiKeyPlaceholder": "Introduce tu clave API de Civitai",
"civitaiApiKeyHelp": "Utilizada para autenticación al descargar modelos de Civitai",
"openSettingsFileLocation": {
"label": "Abrir carpeta de ajustes",
"tooltip": "Abrir la carpeta que contiene settings.json",
"success": "Carpeta de settings.json abierta",
"failed": "No se pudo abrir la carpeta de settings.json"
},
"sections": {
"contentFiltering": "Filtrado de contenido",
"videoSettings": "Configuración de video",
@@ -220,6 +241,10 @@
}
},
"folderSettings": {
"activeLibrary": "Biblioteca activa",
"activeLibraryHelp": "Alterna entre las bibliotecas configuradas para actualizar las carpetas predeterminadas. Cambiar la selección recarga la página.",
"loadingLibraries": "Cargando bibliotecas...",
"noLibraries": "No hay bibliotecas configuradas",
"defaultLoraRoot": "Raíz predeterminada de LoRA",
"defaultLoraRootHelp": "Establecer el directorio raíz predeterminado de LoRA para descargas, importaciones y movimientos",
"defaultCheckpointRoot": "Raíz predeterminada de checkpoint",
@@ -368,6 +393,7 @@
"viewSelected": "Ver seleccionados",
"addTags": "Añadir etiquetas a todos",
"setBaseModel": "Establecer modelo base para todos",
"setContentRating": "Establecer clasificación de contenido para todos",
"copyAll": "Copiar toda la sintaxis",
"refreshAll": "Actualizar todos los metadatos",
"moveAll": "Mover todos a carpeta",
@@ -592,6 +618,7 @@
"contentRating": {
"title": "Establecer clasificación de contenido",
"current": "Actual",
"multiple": "Valores múltiples",
"levels": {
"pg": "PG",
"pg13": "PG13",
@@ -727,6 +754,7 @@
"strengthMin": "Fuerza mínima",
"strengthMax": "Fuerza máxima",
"strength": "Fuerza",
"clipStrength": "Fuerza de Clip",
"clipSkip": "Clip Skip",
"valuePlaceholder": "Valor",
"add": "Añadir"
@@ -1069,6 +1097,10 @@
"bulkBaseModelUpdateSuccess": "Modelo base actualizado exitosamente para {count} modelo(s)",
"bulkBaseModelUpdatePartial": "Actualizados {success} modelo(s), fallaron {failed} modelo(s)",
"bulkBaseModelUpdateFailed": "Error al actualizar el modelo base para los modelos seleccionados",
"bulkContentRatingUpdating": "Actualizando la clasificación de contenido para {count} modelo(s)...",
"bulkContentRatingSet": "Clasificación de contenido establecida en {level} para {count} modelo(s)",
"bulkContentRatingPartial": "Clasificación de contenido establecida en {level} para {success} modelo(s), {failed} fallaron",
"bulkContentRatingFailed": "No se pudo actualizar la clasificación de contenido para los modelos seleccionados",
"invalidCharactersRemoved": "Caracteres inválidos eliminados del nombre de archivo",
"filenameCannotBeEmpty": "El nombre de archivo no puede estar vacío",
"renameFailed": "Error al renombrar archivo: {message}",
@@ -1103,6 +1135,8 @@
"compactModeToggled": "Modo compacto {state}",
"settingSaveFailed": "Error al guardar configuración: {message}",
"displayDensitySet": "Densidad de visualización establecida a {density}",
"libraryLoadFailed": "Failed to load libraries: {message}",
"libraryActivateFailed": "Failed to activate library: {message}",
"languageChangeFailed": "Error al cambiar idioma: {message}",
"cacheCleared": "Archivos de caché limpiados exitosamente. La caché se reconstruirá en la próxima acción.",
"cacheClearFailed": "Error al limpiar caché: {error}",
@@ -1222,6 +1256,12 @@
"refreshNow": "Actualizar ahora",
"refreshingIn": "Actualizando en",
"seconds": "segundos"
},
"communitySupport": {
"title": "Keep LoRA Manager Thriving with Your Support ❤️",
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
"supportCta": "Support on Ko-fi",
"learnMore": "LM Civitai Extension Tutorial"
}
}
}


@@ -31,7 +31,8 @@
"japanese": "日本語",
"korean": "한국어",
"french": "Français",
"spanish": "Español"
"spanish": "Español",
"Hebrew": "עברית"
},
"fileSize": {
"zero": "0 Octets",
@@ -122,6 +123,20 @@
"noRemoteImagesAvailable": "Aucune image d'exemple distante disponible pour ce modèle sur Civitai"
}
},
"globalContextMenu": {
"downloadExampleImages": {
"label": "Télécharger les images d'exemple",
"missingPath": "Définissez un emplacement de téléchargement avant de télécharger les images d'exemple.",
"unavailable": "Le téléchargement des images d'exemple n'est pas encore disponible. Réessayez après le chargement complet de la page."
},
"cleanupExampleImages": {
"label": "Nettoyer les dossiers d'images d'exemple",
"success": "{count} dossier(s) déplacé(s) vers le dossier supprimé",
"none": "Aucun dossier d'images d'exemple à nettoyer",
"partial": "Nettoyage terminé avec {failures} dossier(s) ignoré(s)",
"error": "Échec du nettoyage des dossiers d'images d'exemple : {message}"
}
},
"header": {
"appTitle": "LoRA Manager",
"navigation": {
@@ -173,6 +188,12 @@
"civitaiApiKey": "Clé API Civitai",
"civitaiApiKeyPlaceholder": "Entrez votre clé API Civitai",
"civitaiApiKeyHelp": "Utilisée pour l'authentification lors du téléchargement de modèles depuis Civitai",
"openSettingsFileLocation": {
"label": "Ouvrir le dossier des paramètres",
"tooltip": "Ouvrir le dossier contenant settings.json",
"success": "Dossier settings.json ouvert",
"failed": "Impossible d'ouvrir le dossier settings.json"
},
"sections": {
"contentFiltering": "Filtrage du contenu",
"videoSettings": "Paramètres vidéo",
@@ -220,6 +241,10 @@
}
},
"folderSettings": {
"activeLibrary": "Bibliothèque active",
"activeLibraryHelp": "Basculer entre les bibliothèques configurées pour mettre à jour les dossiers par défaut. Changer la sélection recharge la page.",
"loadingLibraries": "Chargement des bibliothèques...",
"noLibraries": "Aucune bibliothèque configurée",
"defaultLoraRoot": "Racine LoRA par défaut",
"defaultLoraRootHelp": "Définir le répertoire racine LoRA par défaut pour les téléchargements, imports et déplacements",
"defaultCheckpointRoot": "Racine Checkpoint par défaut",
@@ -368,6 +393,7 @@
"viewSelected": "Voir la sélection",
"addTags": "Ajouter des tags à tous",
"setBaseModel": "Définir le modèle de base pour tous",
"setContentRating": "Définir la classification du contenu pour tous",
"copyAll": "Copier toute la syntaxe",
"refreshAll": "Actualiser toutes les métadonnées",
"moveAll": "Déplacer tout vers un dossier",
@@ -592,6 +618,7 @@
"contentRating": {
"title": "Définir la classification du contenu",
"current": "Actuel",
"multiple": "Valeurs multiples",
"levels": {
"pg": "PG",
"pg13": "PG13",
@@ -727,6 +754,7 @@
"strengthMin": "Force Min",
"strengthMax": "Force Max",
"strength": "Force",
"clipStrength": "Force Clip",
"clipSkip": "Clip Skip",
"valuePlaceholder": "Valeur",
"add": "Ajouter"
@@ -1069,6 +1097,10 @@
"bulkBaseModelUpdateSuccess": "Modèle de base mis à jour avec succès pour {count} modèle(s)",
"bulkBaseModelUpdatePartial": "{success} modèle(s) mis à jour, {failed} modèle(s) en échec",
"bulkBaseModelUpdateFailed": "Échec de la mise à jour du modèle de base pour les modèles sélectionnés",
"bulkContentRatingUpdating": "Mise à jour de la classification du contenu pour {count} modèle(s)...",
"bulkContentRatingSet": "Classification du contenu définie sur {level} pour {count} modèle(s)",
"bulkContentRatingPartial": "Classification du contenu définie sur {level} pour {success} modèle(s), {failed} échec(s)",
"bulkContentRatingFailed": "Impossible de mettre à jour la classification du contenu pour les modèles sélectionnés",
"invalidCharactersRemoved": "Caractères invalides supprimés du nom de fichier",
"filenameCannotBeEmpty": "Le nom de fichier ne peut pas être vide",
"renameFailed": "Échec du renommage du fichier : {message}",
@@ -1103,6 +1135,8 @@
"compactModeToggled": "Mode compact {state}",
"settingSaveFailed": "Échec de la sauvegarde du paramètre : {message}",
"displayDensitySet": "Densité d'affichage définie sur {density}",
"libraryLoadFailed": "Failed to load libraries: {message}",
"libraryActivateFailed": "Failed to activate library: {message}",
"languageChangeFailed": "Échec du changement de langue : {message}",
"cacheCleared": "Les fichiers de cache ont été vidés avec succès. Le cache sera reconstruit à la prochaine action.",
"cacheClearFailed": "Échec du vidage du cache : {error}",
@@ -1222,6 +1256,12 @@
"refreshNow": "Actualiser maintenant",
"refreshingIn": "Actualisation dans",
"seconds": "secondes"
},
"communitySupport": {
"title": "Keep LoRA Manager Thriving with Your Support ❤️",
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
"supportCta": "Support on Ko-fi",
"learnMore": "LM Civitai Extension Tutorial"
}
}
}

locales/he.json (new file, 1267 lines; diff suppressed because it is too large)


@@ -31,7 +31,8 @@
"japanese": "日本語",
"korean": "한국어",
"french": "Français",
"spanish": "Español"
"spanish": "Español",
"Hebrew": "עברית"
},
"fileSize": {
"zero": "0バイト",
@@ -122,6 +123,20 @@
"noRemoteImagesAvailable": "このモデルのCivitaiでのリモート例画像は利用できません"
}
},
"globalContextMenu": {
"downloadExampleImages": {
"label": "例画像をダウンロード",
"missingPath": "例画像をダウンロードする前にダウンロード場所を設定してください。",
"unavailable": "例画像のダウンロードはまだ利用できません。ページの読み込みが完了してから再度お試しください。"
},
"cleanupExampleImages": {
"label": "例画像フォルダをクリーンアップ",
"success": "{count} 個のフォルダを削除フォルダに移動しました",
"none": "クリーンアップが必要な例画像フォルダはありません",
"partial": "クリーンアップが完了しましたが、{failures} 個のフォルダはスキップされました",
"error": "例画像フォルダのクリーンアップに失敗しました:{message}"
}
},
"header": {
"appTitle": "LoRA Manager",
"navigation": {
@@ -173,6 +188,12 @@
"civitaiApiKey": "Civitai APIキー",
"civitaiApiKeyPlaceholder": "Civitai APIキーを入力してください",
"civitaiApiKeyHelp": "Civitaiからモデルをダウンロードするときの認証に使用されます",
"openSettingsFileLocation": {
"label": "設定フォルダーを開く",
"tooltip": "settings.json を含むフォルダーを開きます",
"success": "settings.json フォルダーを開きました",
"failed": "settings.json フォルダーを開けませんでした"
},
"sections": {
"contentFiltering": "コンテンツフィルタリング",
"videoSettings": "動画設定",
@@ -220,6 +241,10 @@
}
},
"folderSettings": {
"activeLibrary": "アクティブライブラリ",
"activeLibraryHelp": "設定済みのライブラリを切り替えてデフォルトのフォルダを更新します。選択を変更するとページが再読み込みされます。",
"loadingLibraries": "ライブラリを読み込み中...",
"noLibraries": "ライブラリが設定されていません",
"defaultLoraRoot": "デフォルトLoRAルート",
"defaultLoraRootHelp": "ダウンロード、インポート、移動用のデフォルトLoRAルートディレクトリを設定",
"defaultCheckpointRoot": "デフォルトCheckpointルート",
@@ -368,6 +393,7 @@
"viewSelected": "選択中を表示",
"addTags": "すべてにタグを追加",
"setBaseModel": "すべてにベースモデルを設定",
"setContentRating": "すべてのモデルのコンテンツレーティングを設定",
"copyAll": "すべての構文をコピー",
"refreshAll": "すべてのメタデータを更新",
"moveAll": "すべてをフォルダに移動",
@@ -592,6 +618,7 @@
"contentRating": {
"title": "コンテンツレーティングを設定",
"current": "現在",
"multiple": "複数の値",
"levels": {
"pg": "PG",
"pg13": "PG13",
@@ -727,6 +754,7 @@
"strengthMin": "強度最小",
"strengthMax": "強度最大",
"strength": "強度",
"clipStrength": "クリップ強度",
"clipSkip": "Clip Skip",
"valuePlaceholder": "値",
"add": "追加"
@@ -1069,6 +1097,10 @@
"bulkBaseModelUpdateSuccess": "{count} モデルのベースモデルが正常に更新されました",
"bulkBaseModelUpdatePartial": "{success} モデルを更新、{failed} モデルは失敗しました",
"bulkBaseModelUpdateFailed": "選択したモデルのベースモデルの更新に失敗しました",
"bulkContentRatingUpdating": "{count} 件のモデルのコンテンツレーティングを更新中...",
"bulkContentRatingSet": "{count} 件のモデルのコンテンツレーティングを {level} に設定しました",
"bulkContentRatingPartial": "{success} 件のモデルのコンテンツレーティングを {level} に設定、{failed} 件は失敗しました",
"bulkContentRatingFailed": "選択したモデルのコンテンツレーティングを更新できませんでした",
"invalidCharactersRemoved": "ファイル名から無効な文字が削除されました",
"filenameCannotBeEmpty": "ファイル名を空にすることはできません",
"renameFailed": "ファイル名の変更に失敗しました:{message}",
@@ -1103,6 +1135,8 @@
"compactModeToggled": "コンパクトモード {state}",
"settingSaveFailed": "設定の保存に失敗しました:{message}",
"displayDensitySet": "表示密度が {density} に設定されました",
"libraryLoadFailed": "Failed to load libraries: {message}",
"libraryActivateFailed": "Failed to activate library: {message}",
"languageChangeFailed": "言語の変更に失敗しました:{message}",
"cacheCleared": "キャッシュファイルが正常にクリアされました。次回のアクションでキャッシュが再構築されます。",
"cacheClearFailed": "キャッシュのクリアに失敗しました:{error}",
@@ -1222,6 +1256,12 @@
"refreshNow": "今すぐ更新",
"refreshingIn": "更新まで",
"seconds": "秒"
},
"communitySupport": {
"title": "Keep LoRA Manager Thriving with Your Support ❤️",
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
"supportCta": "Support on Ko-fi",
"learnMore": "LM Civitai Extension Tutorial"
}
}
}


@@ -31,7 +31,8 @@
"japanese": "日本語",
"korean": "한국어",
"french": "Français",
"spanish": "Español"
"spanish": "Español",
"Hebrew": "עברית"
},
"fileSize": {
"zero": "0 바이트",
@@ -122,6 +123,20 @@
"noRemoteImagesAvailable": "Civitai에서 이 모델의 원격 예시 이미지를 사용할 수 없습니다"
}
},
"globalContextMenu": {
"downloadExampleImages": {
"label": "예시 이미지 다운로드",
"missingPath": "예시 이미지를 다운로드하기 전에 다운로드 위치를 설정하세요.",
"unavailable": "예시 이미지 다운로드는 아직 사용할 수 없습니다. 페이지 로딩이 완료된 후 다시 시도하세요."
},
"cleanupExampleImages": {
"label": "예시 이미지 폴더 정리",
"success": "{count}개의 폴더가 삭제 폴더로 이동되었습니다",
"none": "정리가 필요한 예시 이미지 폴더가 없습니다",
"partial": "정리가 완료되었으나 {failures}개의 폴더가 건너뛰어졌습니다",
"error": "예시 이미지 폴더 정리에 실패했습니다: {message}"
}
},
"header": {
"appTitle": "LoRA Manager",
"navigation": {
@@ -173,6 +188,12 @@
"civitaiApiKey": "Civitai API 키",
"civitaiApiKeyPlaceholder": "Civitai API 키를 입력하세요",
"civitaiApiKeyHelp": "Civitai에서 모델을 다운로드할 때 인증에 사용됩니다",
"openSettingsFileLocation": {
"label": "설정 폴더 열기",
"tooltip": "settings.json이 있는 폴더를 엽니다",
"success": "settings.json 폴더를 열었습니다",
"failed": "settings.json 폴더를 열지 못했습니다"
},
"sections": {
"contentFiltering": "콘텐츠 필터링",
"videoSettings": "비디오 설정",
@@ -220,6 +241,10 @@
}
},
"folderSettings": {
"activeLibrary": "활성 라이브러리",
"activeLibraryHelp": "구성된 라이브러리를 전환하여 기본 폴더를 업데이트합니다. 선택을 변경하면 페이지가 다시 로드됩니다.",
"loadingLibraries": "라이브러리를 불러오는 중...",
"noLibraries": "구성된 라이브러리가 없습니다",
"defaultLoraRoot": "기본 LoRA 루트",
"defaultLoraRootHelp": "다운로드, 가져오기 및 이동을 위한 기본 LoRA 루트 디렉토리를 설정합니다",
"defaultCheckpointRoot": "기본 Checkpoint 루트",
@@ -368,6 +393,7 @@
"viewSelected": "선택 항목 보기",
"addTags": "모두에 태그 추가",
"setBaseModel": "모두에 베이스 모델 설정",
"setContentRating": "모든 모델에 콘텐츠 등급 설정",
"copyAll": "모든 문법 복사",
"refreshAll": "모든 메타데이터 새로고침",
"moveAll": "모두 폴더로 이동",
@@ -592,6 +618,7 @@
"contentRating": {
"title": "콘텐츠 등급 설정",
"current": "현재",
"multiple": "여러 값",
"levels": {
"pg": "PG",
"pg13": "PG13",
@@ -727,6 +754,7 @@
"strengthMin": "최소 강도",
"strengthMax": "최대 강도",
"strength": "강도",
"clipStrength": "클립 강도",
"clipSkip": "클립 스킵",
"valuePlaceholder": "값",
"add": "추가"
@@ -1069,6 +1097,10 @@
"bulkBaseModelUpdateSuccess": "{count}개의 모델에 베이스 모델이 성공적으로 업데이트되었습니다",
"bulkBaseModelUpdatePartial": "{success}개의 모델이 업데이트되었고, {failed}개의 모델이 실패했습니다",
"bulkBaseModelUpdateFailed": "선택한 모델의 베이스 모델 업데이트에 실패했습니다",
"bulkContentRatingUpdating": "{count}개 모델의 콘텐츠 등급을 업데이트하는 중...",
"bulkContentRatingSet": "{count}개 모델의 콘텐츠 등급을 {level}(으)로 설정했습니다",
"bulkContentRatingPartial": "{success}개 모델의 콘텐츠 등급을 {level}(으)로 설정했고, {failed}개는 실패했습니다",
"bulkContentRatingFailed": "선택한 모델의 콘텐츠 등급을 업데이트하지 못했습니다",
"invalidCharactersRemoved": "파일명에서 잘못된 문자가 제거되었습니다",
"filenameCannotBeEmpty": "파일 이름은 비어있을 수 없습니다",
"renameFailed": "파일 이름 변경 실패: {message}",
@@ -1103,6 +1135,8 @@
"compactModeToggled": "컴팩트 모드 {state}",
"settingSaveFailed": "설정 저장 실패: {message}",
"displayDensitySet": "표시 밀도가 {density}로 설정되었습니다",
"libraryLoadFailed": "Failed to load libraries: {message}",
"libraryActivateFailed": "Failed to activate library: {message}",
"languageChangeFailed": "언어 변경 실패: {message}",
"cacheCleared": "캐시 파일이 성공적으로 지워졌습니다. 다음 작업 시 캐시가 재구축됩니다.",
"cacheClearFailed": "캐시 지우기 실패: {error}",
@@ -1222,6 +1256,12 @@
"refreshNow": "지금 새로고침",
"refreshingIn": "새로고침까지",
"seconds": "초"
},
"communitySupport": {
"title": "Keep LoRA Manager Thriving with Your Support ❤️",
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
"supportCta": "Support on Ko-fi",
"learnMore": "LM Civitai Extension Tutorial"
}
}
}


@@ -31,7 +31,8 @@
"japanese": "日本語",
"korean": "한국어",
"french": "Français",
"spanish": "Español"
"spanish": "Español",
"Hebrew": "עברית"
},
"fileSize": {
"zero": "0 Байт",
@@ -122,6 +123,20 @@
"noRemoteImagesAvailable": "Нет удаленных примеров изображений для этой модели на Civitai"
}
},
"globalContextMenu": {
"downloadExampleImages": {
"label": "Загрузить примеры изображений",
"missingPath": "Укажите место загрузки перед загрузкой примеров изображений.",
"unavailable": "Загрузка примеров изображений пока недоступна. Попробуйте снова после полной загрузки страницы."
},
"cleanupExampleImages": {
"label": "Очистить папки с примерами изображений",
"success": "Перемещено {count} папок в папку удалённых",
"none": "Нет папок с примерами изображений, требующих очистки",
"partial": "Очистка завершена, пропущено {failures} папок",
"error": "Не удалось очистить папки с примерами изображений: {message}"
}
},
"header": {
"appTitle": "LoRA Manager",
"navigation": {
@@ -173,6 +188,12 @@
"civitaiApiKey": "Ключ API Civitai",
"civitaiApiKeyPlaceholder": "Введите ваш ключ API Civitai",
"civitaiApiKeyHelp": "Используется для аутентификации при загрузке моделей с Civitai",
"openSettingsFileLocation": {
"label": "Открыть папку настроек",
"tooltip": "Открыть папку, содержащую settings.json",
"success": "Папка settings.json открыта",
"failed": "Не удалось открыть папку settings.json"
},
"sections": {
"contentFiltering": "Фильтрация контента",
"videoSettings": "Настройки видео",
@@ -220,6 +241,10 @@
}
},
"folderSettings": {
"activeLibrary": "Активная библиотека",
"activeLibraryHelp": "Переключайтесь между настроенными библиотеками, чтобы обновить папки по умолчанию. Изменение выбора перезагружает страницу.",
"loadingLibraries": "Загрузка библиотек...",
"noLibraries": "Библиотеки не настроены",
"defaultLoraRoot": "Корневая папка LoRA по умолчанию",
"defaultLoraRootHelp": "Установить корневую папку LoRA по умолчанию для загрузок, импорта и перемещений",
"defaultCheckpointRoot": "Корневая папка Checkpoint по умолчанию",
@@ -368,6 +393,7 @@
"viewSelected": "Просмотреть выбранные",
"addTags": "Добавить теги ко всем",
"setBaseModel": "Установить базовую модель для всех",
"setContentRating": "Установить рейтинг контента для всех",
"copyAll": "Копировать весь синтаксис",
"refreshAll": "Обновить все метаданные",
"moveAll": "Переместить все в папку",
@@ -592,6 +618,7 @@
"contentRating": {
"title": "Установить рейтинг контента",
"current": "Текущий",
"multiple": "Несколько значений",
"levels": {
"pg": "PG",
"pg13": "PG13",
@@ -727,6 +754,7 @@
"strengthMin": "Мин. сила",
"strengthMax": "Макс. сила",
"strength": "Сила",
"clipStrength": "Сила клипа",
"clipSkip": "Clip Skip",
"valuePlaceholder": "Значение",
"add": "Добавить"
@@ -1069,6 +1097,10 @@
"bulkBaseModelUpdateSuccess": "Базовая модель успешно обновлена для {count} моделей",
"bulkBaseModelUpdatePartial": "Обновлено {success} моделей, не удалось обновить {failed} моделей",
"bulkBaseModelUpdateFailed": "Не удалось обновить базовую модель для выбранных моделей",
"bulkContentRatingUpdating": "Обновление рейтинга контента для {count} модель(ей)...",
"bulkContentRatingSet": "Рейтинг контента установлен на {level} для {count} модель(ей)",
"bulkContentRatingPartial": "Рейтинг контента {level} установлен для {success} модель(ей), {failed} не удалось",
"bulkContentRatingFailed": "Не удалось обновить рейтинг контента для выбранных моделей",
"invalidCharactersRemoved": "Недопустимые символы удалены из имени файла",
"filenameCannotBeEmpty": "Имя файла не может быть пустым",
"renameFailed": "Не удалось переименовать файл: {message}",
@@ -1103,6 +1135,8 @@
"compactModeToggled": "Компактный режим {state}",
"settingSaveFailed": "Не удалось сохранить настройку: {message}",
"displayDensitySet": "Плотность отображения установлена на {density}",
"libraryLoadFailed": "Failed to load libraries: {message}",
"libraryActivateFailed": "Failed to activate library: {message}",
"languageChangeFailed": "Не удалось изменить язык: {message}",
"cacheCleared": "Файлы кэша успешно очищены. Кэш будет пересобран при следующем действии.",
"cacheClearFailed": "Не удалось очистить кэш: {error}",
@@ -1222,6 +1256,12 @@
"refreshNow": "Обновить сейчас",
"refreshingIn": "Обновление через",
"seconds": "секунд"
},
"communitySupport": {
"title": "Keep LoRA Manager Thriving with Your Support ❤️",
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
"supportCta": "Support on Ko-fi",
"learnMore": "LM Civitai Extension Tutorial"
}
}
}


@@ -21,11 +21,18 @@
"disabled": "已禁用"
},
"language": {
"select": "Language",
"select_help": "Choose your preferred language for the interface",
"select": "选择语言",
"select_help": "选择你喜欢的界面语言",
"english": "English",
"chinese_simplified": "中文(简体)",
"chinese_traditional": "中文(繁体)",
"russian": "俄语",
"german": "德语",
"japanese": "日语",
"korean": "韩语",
"french": "法语",
"spanish": "西班牙语",
"Hebrew": "עברית",
"russian": "Русский",
"german": "Deutsch",
"japanese": "日本語",
@@ -122,6 +129,20 @@
"noRemoteImagesAvailable": "此模型在 Civitai 上没有远程示例图片"
}
},
"globalContextMenu": {
"downloadExampleImages": {
"label": "下载示例图片",
"missingPath": "请先设置下载位置后再下载示例图片。",
"unavailable": "示例图片下载当前不可用。请在页面加载完成后重试。"
},
"cleanupExampleImages": {
"label": "清理示例图片文件夹",
"success": "已将 {count} 个文件夹移动到已删除文件夹",
"none": "没有需要清理的示例图片文件夹",
"partial": "清理完成,有 {failures} 个文件夹跳过",
"error": "清理示例图片文件夹失败:{message}"
}
},
"header": {
"appTitle": "LoRA 管理器",
"navigation": {
@@ -173,6 +194,12 @@
"civitaiApiKey": "Civitai API 密钥",
"civitaiApiKeyPlaceholder": "请输入你的 Civitai API 密钥",
"civitaiApiKeyHelp": "用于从 Civitai 下载模型时的身份验证",
"openSettingsFileLocation": {
"label": "打开设置文件夹",
"tooltip": "打开包含 settings.json 的文件夹",
"success": "已打开 settings.json 文件夹",
"failed": "无法打开 settings.json 文件夹"
},
"sections": {
"contentFiltering": "内容过滤",
"videoSettings": "视频设置",
@@ -220,6 +247,10 @@
}
},
"folderSettings": {
"activeLibrary": "活动库",
"activeLibraryHelp": "在已配置的库之间切换以更新默认文件夹。更改选择将重新加载页面。",
"loadingLibraries": "正在加载库...",
"noLibraries": "尚未配置库",
"defaultLoraRoot": "默认 LoRA 根目录",
"defaultLoraRootHelp": "设置下载、导入和移动时的默认 LoRA 根目录",
"defaultCheckpointRoot": "默认 Checkpoint 根目录",
@@ -368,6 +399,7 @@
"viewSelected": "查看已选中",
"addTags": "为所有添加标签",
"setBaseModel": "为所有设置基础模型",
"setContentRating": "为全部设置内容评级",
"copyAll": "复制全部语法",
"refreshAll": "刷新全部元数据",
"moveAll": "全部移动到文件夹",
@@ -592,6 +624,7 @@
"contentRating": {
"title": "设置内容评级",
"current": "当前",
"multiple": "多个值",
"levels": {
"pg": "PG",
"pg13": "PG13",
@@ -727,6 +760,7 @@
"strengthMin": "最小强度",
"strengthMax": "最大强度",
"strength": "强度",
"clipStrength": "Clip 强度",
"clipSkip": "Clip Skip",
"valuePlaceholder": "数值",
"add": "添加"
@@ -1069,6 +1103,10 @@
"bulkBaseModelUpdateSuccess": "成功为 {count} 个模型更新基础模型",
"bulkBaseModelUpdatePartial": "更新了 {success} 个模型,{failed} 个失败",
"bulkBaseModelUpdateFailed": "为选中模型更新基础模型失败",
"bulkContentRatingUpdating": "正在为 {count} 个模型更新内容评级...",
"bulkContentRatingSet": "已将 {count} 个模型的内容评级设置为 {level}",
"bulkContentRatingPartial": "已将 {success} 个模型的内容评级设置为 {level}{failed} 个失败",
"bulkContentRatingFailed": "未能更新所选模型的内容评级",
"invalidCharactersRemoved": "文件名中的无效字符已移除",
"filenameCannotBeEmpty": "文件名不能为空",
"renameFailed": "重命名文件失败:{message}",
@@ -1103,6 +1141,8 @@
"compactModeToggled": "紧凑模式 {state}",
"settingSaveFailed": "保存设置失败:{message}",
"displayDensitySet": "显示密度已设置为 {density}",
"libraryLoadFailed": "Failed to load libraries: {message}",
"libraryActivateFailed": "Failed to activate library: {message}",
"languageChangeFailed": "切换语言失败:{message}",
"cacheCleared": "缓存文件已成功清除。下次操作将重建缓存。",
"cacheClearFailed": "清除缓存失败:{error}",
@@ -1222,6 +1262,12 @@
"refreshNow": "立即刷新",
"refreshingIn": "将在",
"seconds": "秒后刷新"
},
"communitySupport": {
"title": "Keep LoRA Manager Thriving with Your Support ❤️",
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
"supportCta": "Support on Ko-fi",
"learnMore": "LM Civitai Extension Tutorial"
}
}
}

View File

@@ -31,7 +31,8 @@
"japanese": "日本語",
"korean": "한국어",
"french": "Français",
"spanish": "Español"
"spanish": "Español",
"Hebrew": "עברית"
},
"fileSize": {
"zero": "0 位元組",
@@ -122,6 +123,20 @@
"noRemoteImagesAvailable": "此模型在 Civitai 上無遠端範例圖片"
}
},
"globalContextMenu": {
"downloadExampleImages": {
"label": "下載範例圖片",
"missingPath": "請先設定下載位置再下載範例圖片。",
"unavailable": "範例圖片下載目前尚不可用。請在頁面載入完成後再試一次。"
},
"cleanupExampleImages": {
"label": "清理範例圖片資料夾",
"success": "已將 {count} 個資料夾移至已刪除資料夾",
"none": "沒有需要清理的範例圖片資料夾",
"partial": "清理完成,有 {failures} 個資料夾略過",
"error": "清理範例圖片資料夾失敗:{message}"
}
},
"header": {
"appTitle": "LoRA 管理器",
"navigation": {
@@ -173,6 +188,12 @@
"civitaiApiKey": "Civitai API 金鑰",
"civitaiApiKeyPlaceholder": "請輸入您的 Civitai API 金鑰",
"civitaiApiKeyHelp": "用於從 Civitai 下載模型時的身份驗證",
"openSettingsFileLocation": {
"label": "開啟設定資料夾",
"tooltip": "開啟包含 settings.json 的資料夾",
"success": "已開啟 settings.json 資料夾",
"failed": "無法開啟 settings.json 資料夾"
},
"sections": {
"contentFiltering": "內容過濾",
"videoSettings": "影片設定",
@@ -220,6 +241,10 @@
}
},
"folderSettings": {
"activeLibrary": "使用中的資料庫",
"activeLibraryHelp": "在已設定的資料庫之間切換以更新預設資料夾。變更選項會重新載入頁面。",
"loadingLibraries": "正在載入資料庫...",
"noLibraries": "尚未設定任何資料庫",
"defaultLoraRoot": "預設 LoRA 根目錄",
"defaultLoraRootHelp": "設定下載、匯入和移動時的預設 LoRA 根目錄",
"defaultCheckpointRoot": "預設 Checkpoint 根目錄",
@@ -368,6 +393,7 @@
"viewSelected": "檢視已選取",
"addTags": "新增標籤到全部",
"setBaseModel": "設定全部基礎模型",
"setContentRating": "為全部設定內容分級",
"copyAll": "複製全部語法",
"refreshAll": "刷新全部 metadata",
"moveAll": "全部移動到資料夾",
@@ -592,6 +618,7 @@
"contentRating": {
"title": "設定內容分級",
"current": "目前",
"multiple": "多個值",
"levels": {
"pg": "PG",
"pg13": "PG13",
@@ -727,6 +754,7 @@
"strengthMin": "最小強度",
"strengthMax": "最大強度",
"strength": "強度",
"clipStrength": "Clip 強度",
"clipSkip": "Clip Skip",
"valuePlaceholder": "數值",
"add": "新增"
@@ -1069,6 +1097,10 @@
"bulkBaseModelUpdateSuccess": "已成功為 {count} 個模型更新基礎模型",
"bulkBaseModelUpdatePartial": "已更新 {success} 個模型,{failed} 個模型失敗",
"bulkBaseModelUpdateFailed": "更新所選模型的基礎模型失敗",
"bulkContentRatingUpdating": "正在為 {count} 個模型更新內容分級...",
"bulkContentRatingSet": "已將 {count} 個模型的內容分級設定為 {level}",
"bulkContentRatingPartial": "已將 {success} 個模型的內容分級設定為 {level}{failed} 個失敗",
"bulkContentRatingFailed": "無法更新所選模型的內容分級",
"invalidCharactersRemoved": "已移除檔名中的無效字元",
"filenameCannotBeEmpty": "檔案名稱不可為空",
"renameFailed": "重新命名檔案失敗:{message}",
@@ -1103,6 +1135,8 @@
"compactModeToggled": "緊湊模式已{state}",
"settingSaveFailed": "儲存設定失敗:{message}",
"displayDensitySet": "顯示密度已設為 {density}",
"libraryLoadFailed": "Failed to load libraries: {message}",
"libraryActivateFailed": "Failed to activate library: {message}",
"languageChangeFailed": "切換語言失敗:{message}",
"cacheCleared": "快取檔案已成功清除。快取將於下次操作時重建。",
"cacheClearFailed": "清除快取失敗:{error}",
@@ -1222,6 +1256,12 @@
"refreshNow": "立即重新整理",
"refreshingIn": "將於",
"seconds": "秒後重新整理"
},
"communitySupport": {
"title": "Keep LoRA Manager Thriving with Your Support ❤️",
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
"supportCta": "Support on Ko-fi",
"learnMore": "LM Civitai Extension Tutorial"
}
}
}

package-lock.json generated Normal file

File diff suppressed because it is too large

package.json Normal file
View File

@@ -0,0 +1,15 @@
{
"name": "comfyui-lora-manager-frontend",
"version": "0.1.0",
"private": true,
"type": "module",
"scripts": {
"test": "vitest run",
"test:watch": "vitest",
"test:coverage": "node scripts/run_frontend_coverage.js"
},
"devDependencies": {
"jsdom": "^24.0.0",
"vitest": "^1.6.0"
}
}

View File

@@ -0,0 +1,12 @@
"""Project namespace package."""
# pytest's internal compatibility layer still imports ``py.path.local`` from the
# historical ``py`` dependency. Because this project reuses the ``py`` package
# name, we expose a minimal shim so ``py.path.local`` resolves to ``pathlib.Path``
# during test runs without pulling in the external dependency.
from pathlib import Path
from types import SimpleNamespace
path = SimpleNamespace(local=Path)
__all__ = ["path"]

View File

@@ -1,17 +1,50 @@
import os
import platform
from pathlib import Path
import folder_paths # type: ignore
from typing import List
from typing import Dict, Iterable, List, Mapping, Set
import logging
import sys
import json
import urllib.parse
# Check if running in standalone mode
standalone_mode = 'nodes' not in sys.modules
from .utils.settings_paths import ensure_settings_file
# Use an environment variable to control standalone mode
standalone_mode = os.environ.get("LORA_MANAGER_STANDALONE", "0") == "1"
logger = logging.getLogger(__name__)
def _normalize_folder_paths_for_comparison(
folder_paths: Mapping[str, Iterable[str]]
) -> Dict[str, Set[str]]:
"""Normalize folder paths for comparison across libraries."""
normalized: Dict[str, Set[str]] = {}
for key, values in folder_paths.items():
if isinstance(values, str):
candidate_values: Iterable[str] = [values]
else:
try:
candidate_values = iter(values)
except TypeError:
continue
normalized_values: Set[str] = set()
for value in candidate_values:
if not isinstance(value, str):
continue
stripped = value.strip()
if not stripped:
continue
normalized_values.add(os.path.normcase(os.path.normpath(stripped)))
if normalized_values:
normalized[key] = normalized_values
return normalized
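# A quick sketch of the comparison shape (hypothetical Windows paths): both
# spellings below collapse to a single normalized entry, so two libraries that
# describe the same directories with different separators or casing compare
# equal, while empty lists and non-string values drop out:
#     _normalize_folder_paths_for_comparison(
#         {"loras": ["C:/Models/Loras", "c:\\models\\loras\\"], "unet": []}
#     )
#     # -> {"loras": {"c:\\models\\loras"}}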
class Config:
"""Global configuration for LoRA Manager"""
@@ -20,9 +53,9 @@ class Config:
self.static_path = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'static')
self.i18n_path = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'locales')
# Path mapping dictionary, target to link mapping
self._path_mappings = {}
# Static route mapping dictionary, target to route mapping
self._route_mappings = {}
self._path_mappings: Dict[str, str] = {}
# Normalized preview root directories used to validate preview access
self._preview_root_paths: Set[Path] = set()
self.loras_roots = self._init_lora_paths()
self.checkpoints_roots = None
self.unet_roots = None
@@ -31,45 +64,73 @@ class Config:
self.embeddings_roots = self._init_embedding_paths()
# Scan symbolic links during initialization
self._scan_symbolic_links()
self._rebuild_preview_roots()
if not standalone_mode:
# Save the paths to settings.json when running in ComfyUI mode
self.save_folder_paths_to_settings()
def save_folder_paths_to_settings(self):
"""Save folder paths to settings.json for standalone mode to use later"""
"""Persist ComfyUI-derived folder paths to the multi-library settings."""
try:
# Check if we're running in ComfyUI mode (not standalone)
# Load existing settings
settings_path = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'settings.json')
settings = {}
if os.path.exists(settings_path):
with open(settings_path, 'r', encoding='utf-8') as f:
settings = json.load(f)
# Update settings with paths
settings['folder_paths'] = {
'loras': self.loras_roots,
'checkpoints': self.checkpoints_roots,
'unet': self.unet_roots,
'embeddings': self.embeddings_roots,
}
# Add default roots if there's only one item and key doesn't exist
if len(self.loras_roots) == 1 and "default_lora_root" not in settings:
settings["default_lora_root"] = self.loras_roots[0]
if self.checkpoints_roots and len(self.checkpoints_roots) == 1 and "default_checkpoint_root" not in settings:
settings["default_checkpoint_root"] = self.checkpoints_roots[0]
ensure_settings_file(logger)
from .services.settings_manager import settings as settings_service
if self.embeddings_roots and len(self.embeddings_roots) == 1 and "default_embedding_root" not in settings:
settings["default_embedding_root"] = self.embeddings_roots[0]
# Save settings
with open(settings_path, 'w', encoding='utf-8') as f:
json.dump(settings, f, indent=2)
logger.info("Saved folder paths to settings.json")
libraries = settings_service.get_libraries()
comfy_library = libraries.get("comfyui", {})
default_library = libraries.get("default", {})
target_folder_paths = {
'loras': list(self.loras_roots),
'checkpoints': list(self.checkpoints_roots or []),
'unet': list(self.unet_roots or []),
'embeddings': list(self.embeddings_roots or []),
}
normalized_target_paths = _normalize_folder_paths_for_comparison(target_folder_paths)
if (not comfy_library and default_library and normalized_target_paths and
_normalize_folder_paths_for_comparison(default_library.get("folder_paths", {})) ==
normalized_target_paths):
try:
settings_service.rename_library("default", "comfyui")
logger.info("Renamed legacy 'default' library to 'comfyui'")
libraries = settings_service.get_libraries()
comfy_library = libraries.get("comfyui", {})
except Exception as rename_error:
logger.debug(
"Failed to rename legacy 'default' library: %s", rename_error
)
default_lora_root = comfy_library.get("default_lora_root", "")
if not default_lora_root and len(self.loras_roots) == 1:
default_lora_root = self.loras_roots[0]
default_checkpoint_root = comfy_library.get("default_checkpoint_root", "")
if (not default_checkpoint_root and self.checkpoints_roots and
len(self.checkpoints_roots) == 1):
default_checkpoint_root = self.checkpoints_roots[0]
default_embedding_root = comfy_library.get("default_embedding_root", "")
if (not default_embedding_root and self.embeddings_roots and
len(self.embeddings_roots) == 1):
default_embedding_root = self.embeddings_roots[0]
metadata = dict(comfy_library.get("metadata", {}))
metadata.setdefault("display_name", "ComfyUI")
metadata["source"] = "comfyui"
settings_service.upsert_library(
"comfyui",
folder_paths=target_folder_paths,
default_lora_root=default_lora_root,
default_checkpoint_root=default_checkpoint_root,
default_embedding_root=default_embedding_root,
metadata=metadata,
activate=True,
)
logger.info("Updated 'comfyui' library with current folder paths")
except Exception as e:
logger.warning(f"Failed to save folder paths: {e}")
@@ -126,12 +187,65 @@ class Config:
# Keep the original mapping: target path -> link path
self._path_mappings[normalized_target] = normalized_link
logger.info(f"Added path mapping: {normalized_target} -> {normalized_link}")
self._preview_root_paths.update(self._expand_preview_root(normalized_target))
self._preview_root_paths.update(self._expand_preview_root(normalized_link))
def add_route_mapping(self, path: str, route: str):
"""Add a static route mapping"""
normalized_path = os.path.normpath(path).replace(os.sep, '/')
self._route_mappings[normalized_path] = route
# logger.info(f"Added route mapping: {normalized_path} -> {route}")
def _expand_preview_root(self, path: str) -> Set[Path]:
"""Return normalized ``Path`` objects representing a preview root."""
roots: Set[Path] = set()
if not path:
return roots
try:
raw_path = Path(path).expanduser()
except Exception:
return roots
if raw_path.is_absolute():
roots.add(raw_path)
try:
resolved = raw_path.resolve(strict=False)
except RuntimeError:
resolved = raw_path.absolute()
roots.add(resolved)
try:
real_path = raw_path.resolve()
except (FileNotFoundError, RuntimeError):
real_path = resolved
roots.add(real_path)
normalized: Set[Path] = set()
for candidate in roots:
if candidate.is_absolute():
normalized.add(candidate)
else:
try:
normalized.add(candidate.resolve(strict=False))
except RuntimeError:
normalized.add(candidate.absolute())
return normalized
def _rebuild_preview_roots(self) -> None:
"""Recompute the cache of directories permitted for previews."""
preview_roots: Set[Path] = set()
for root in self.loras_roots or []:
preview_roots.update(self._expand_preview_root(root))
for root in self.base_models_roots or []:
preview_roots.update(self._expand_preview_root(root))
for root in self.embeddings_roots or []:
preview_roots.update(self._expand_preview_root(root))
for target, link in self._path_mappings.items():
preview_roots.update(self._expand_preview_root(target))
preview_roots.update(self._expand_preview_root(link))
self._preview_root_paths = {path for path in preview_roots if path.is_absolute()}
def map_path_to_link(self, path: str) -> str:
"""Map a target path back to its symbolic link path"""
@@ -155,31 +269,93 @@ class Config:
return mapped_path
return link_path
def _dedupe_existing_paths(self, raw_paths: Iterable[str]) -> Dict[str, str]:
dedup: Dict[str, str] = {}
for path in raw_paths:
if not isinstance(path, str):
continue
if not os.path.exists(path):
continue
real_path = os.path.normpath(os.path.realpath(path)).replace(os.sep, '/')
normalized = os.path.normpath(path).replace(os.sep, '/')
if real_path not in dedup:
dedup[real_path] = normalized
return dedup
def _prepare_lora_paths(self, raw_paths: Iterable[str]) -> List[str]:
path_map = self._dedupe_existing_paths(raw_paths)
unique_paths = sorted(path_map.values(), key=lambda p: p.lower())
for original_path in unique_paths:
real_path = os.path.normpath(os.path.realpath(original_path)).replace(os.sep, '/')
if real_path != original_path:
self.add_path_mapping(original_path, real_path)
return unique_paths
def _prepare_checkpoint_paths(
self, checkpoint_paths: Iterable[str], unet_paths: Iterable[str]
) -> List[str]:
checkpoint_map = self._dedupe_existing_paths(checkpoint_paths)
unet_map = self._dedupe_existing_paths(unet_paths)
merged_map: Dict[str, str] = {}
for real_path, original in {**checkpoint_map, **unet_map}.items():
if real_path not in merged_map:
merged_map[real_path] = original
unique_paths = sorted(merged_map.values(), key=lambda p: p.lower())
checkpoint_values = set(checkpoint_map.values())
unet_values = set(unet_map.values())
self.checkpoints_roots = [p for p in unique_paths if p in checkpoint_values]
self.unet_roots = [p for p in unique_paths if p in unet_values]
for original_path in unique_paths:
real_path = os.path.normpath(os.path.realpath(original_path)).replace(os.sep, '/')
if real_path != original_path:
self.add_path_mapping(original_path, real_path)
return unique_paths
def _prepare_embedding_paths(self, raw_paths: Iterable[str]) -> List[str]:
path_map = self._dedupe_existing_paths(raw_paths)
unique_paths = sorted(path_map.values(), key=lambda p: p.lower())
for original_path in unique_paths:
real_path = os.path.normpath(os.path.realpath(original_path)).replace(os.sep, '/')
if real_path != original_path:
self.add_path_mapping(original_path, real_path)
return unique_paths
def _apply_library_paths(self, folder_paths: Mapping[str, Iterable[str]]) -> None:
self._path_mappings.clear()
self._preview_root_paths = set()
lora_paths = folder_paths.get('loras', []) or []
checkpoint_paths = folder_paths.get('checkpoints', []) or []
unet_paths = folder_paths.get('unet', []) or []
embedding_paths = folder_paths.get('embeddings', []) or []
self.loras_roots = self._prepare_lora_paths(lora_paths)
self.base_models_roots = self._prepare_checkpoint_paths(checkpoint_paths, unet_paths)
self.embeddings_roots = self._prepare_embedding_paths(embedding_paths)
self._scan_symbolic_links()
self._rebuild_preview_roots()
def _init_lora_paths(self) -> List[str]:
"""Initialize and validate LoRA paths from ComfyUI settings"""
try:
raw_paths = folder_paths.get_folder_paths("loras")
# Normalize and resolve symlinks, store mapping from resolved -> original
path_map = {}
for path in raw_paths:
if os.path.exists(path):
real_path = os.path.normpath(os.path.realpath(path)).replace(os.sep, '/')
path_map[real_path] = path_map.get(real_path, path.replace(os.sep, "/")) # preserve first seen
# Now sort and use only the deduplicated real paths
unique_paths = sorted(path_map.values(), key=lambda p: p.lower())
unique_paths = self._prepare_lora_paths(raw_paths)
logger.info("Found LoRA roots:" + ("\n - " + "\n - ".join(unique_paths) if unique_paths else "[]"))
if not unique_paths:
logger.warning("No valid loras folders found in ComfyUI configuration")
return []
for original_path in unique_paths:
real_path = os.path.normpath(os.path.realpath(original_path)).replace(os.sep, '/')
if real_path != original_path:
self.add_path_mapping(original_path, real_path)
return unique_paths
except Exception as e:
logger.warning(f"Error initializing LoRA paths: {e}")
@@ -188,52 +364,17 @@ class Config:
def _init_checkpoint_paths(self) -> List[str]:
"""Initialize and validate checkpoint paths from ComfyUI settings"""
try:
# Get checkpoint paths from folder_paths
raw_checkpoint_paths = folder_paths.get_folder_paths("checkpoints")
raw_unet_paths = folder_paths.get_folder_paths("unet")
# Normalize and resolve symlinks for checkpoints, store mapping from resolved -> original
checkpoint_map = {}
for path in raw_checkpoint_paths:
if os.path.exists(path):
real_path = os.path.normpath(os.path.realpath(path)).replace(os.sep, '/')
checkpoint_map[real_path] = checkpoint_map.get(real_path, path.replace(os.sep, "/")) # preserve first seen
# Normalize and resolve symlinks for unet, store mapping from resolved -> original
unet_map = {}
for path in raw_unet_paths:
if os.path.exists(path):
real_path = os.path.normpath(os.path.realpath(path)).replace(os.sep, '/')
unet_map[real_path] = unet_map.get(real_path, path.replace(os.sep, "/")) # preserve first seen
# Merge both maps and deduplicate by real path
merged_map = {}
for real_path, orig_path in {**checkpoint_map, **unet_map}.items():
if real_path not in merged_map:
merged_map[real_path] = orig_path
unique_paths = self._prepare_checkpoint_paths(raw_checkpoint_paths, raw_unet_paths)
# Now sort and use only the deduplicated real paths
unique_paths = sorted(merged_map.values(), key=lambda p: p.lower())
# Split back into checkpoints and unet roots for class properties
self.checkpoints_roots = [p for p in unique_paths if p in checkpoint_map.values()]
self.unet_roots = [p for p in unique_paths if p in unet_map.values()]
all_paths = unique_paths
logger.info("Found checkpoint roots:" + ("\n - " + "\n - ".join(all_paths) if all_paths else "[]"))
if not all_paths:
logger.info("Found checkpoint roots:" + ("\n - " + "\n - ".join(unique_paths) if unique_paths else "[]"))
if not unique_paths:
logger.warning("No valid checkpoint folders found in ComfyUI configuration")
return []
# Initialize path mappings
for original_path in all_paths:
real_path = os.path.normpath(os.path.realpath(original_path)).replace(os.sep, '/')
if real_path != original_path:
self.add_path_mapping(original_path, real_path)
return all_paths
return unique_paths
except Exception as e:
logger.warning(f"Error initializing checkpoint paths: {e}")
return []
@@ -242,27 +383,13 @@ class Config:
"""Initialize and validate embedding paths from ComfyUI settings"""
try:
raw_paths = folder_paths.get_folder_paths("embeddings")
# Normalize and resolve symlinks, store mapping from resolved -> original
path_map = {}
for path in raw_paths:
if os.path.exists(path):
real_path = os.path.normpath(os.path.realpath(path)).replace(os.sep, '/')
path_map[real_path] = path_map.get(real_path, path.replace(os.sep, "/")) # preserve first seen
# Now sort and use only the deduplicated real paths
unique_paths = sorted(path_map.values(), key=lambda p: p.lower())
unique_paths = self._prepare_embedding_paths(raw_paths)
logger.info("Found embedding roots:" + ("\n - " + "\n - ".join(unique_paths) if unique_paths else "[]"))
if not unique_paths:
logger.warning("No valid embeddings folders found in ComfyUI configuration")
return []
for original_path in unique_paths:
real_path = os.path.normpath(os.path.realpath(original_path)).replace(os.sep, '/')
if real_path != original_path:
self.add_path_mapping(original_path, real_path)
return unique_paths
except Exception as e:
logger.warning(f"Error initializing embedding paths: {e}")
@@ -271,25 +398,61 @@ class Config:
def get_preview_static_url(self, preview_path: str) -> str:
if not preview_path:
return ""
real_path = os.path.realpath(preview_path).replace(os.sep, '/')
# Find longest matching path (most specific match)
best_match = ""
best_route = ""
for path, route in self._route_mappings.items():
if real_path.startswith(path) and len(path) > len(best_match):
best_match = path
best_route = route
if best_match:
relative_path = os.path.relpath(real_path, best_match).replace(os.sep, '/')
safe_parts = [urllib.parse.quote(part) for part in relative_path.split('/')]
safe_path = '/'.join(safe_parts)
return f'{best_route}/{safe_path}'
return ""
normalized = os.path.normpath(preview_path).replace(os.sep, '/')
encoded_path = urllib.parse.quote(normalized, safe='')
return f'/api/lm/previews?path={encoded_path}'
def is_preview_path_allowed(self, preview_path: str) -> bool:
"""Return ``True`` if ``preview_path`` is within an allowed directory."""
if not preview_path:
return False
try:
candidate = Path(preview_path).expanduser().resolve(strict=False)
except Exception:
return False
for root in self._preview_root_paths:
try:
candidate.relative_to(root)
return True
except ValueError:
continue
return False
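# Note: containment is tested with Path.relative_to rather than string
# prefixes, so a sibling such as "/data/loras_backup" is rejected even though
# it shares a prefix with an allowed root like "/data/loras" (paths here are
# hypothetical).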
def apply_library_settings(self, library_config: Mapping[str, object]) -> None:
"""Update runtime paths to match the provided library configuration."""
folder_paths = library_config.get('folder_paths') if isinstance(library_config, Mapping) else {}
if not isinstance(folder_paths, Mapping):
folder_paths = {}
self._apply_library_paths(folder_paths)
logger.info(
"Applied library settings with %d lora roots, %d checkpoint roots, and %d embedding roots",
len(self.loras_roots or []),
len(self.base_models_roots or []),
len(self.embeddings_roots or []),
)
def get_library_registry_snapshot(self) -> Dict[str, object]:
"""Return the current library registry and active library name."""
try:
from .services.settings_manager import settings as settings_service
libraries = settings_service.get_libraries()
active_library = settings_service.get_active_library_name()
return {
"active_library": active_library,
"libraries": libraries,
}
except Exception as exc: # pragma: no cover - defensive logging
logger.debug("Failed to collect library registry snapshot: %s", exc)
return {"active_library": "", "libraries": {}}
# Global config instance
config = Config()
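
With the static route tables gone, previews are served from a single endpoint guarded by the allow-list above. A minimal sketch of how a handler might pair the two (the handler name and wiring are assumptions; the actual PreviewRoutes implementation may differ):

from aiohttp import web

from .config import config

async def serve_preview(request: web.Request) -> web.StreamResponse:
    # Counterpart of get_preview_static_url: /api/lm/previews?path=<encoded>.
    # aiohttp has already percent-decoded the query value at this point.
    preview_path = request.query.get("path", "")
    if not config.is_preview_path_allowed(preview_path):
        raise web.HTTPForbidden(reason="preview path outside configured roots")
    return web.FileResponse(preview_path)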

View File

@@ -2,7 +2,6 @@ import asyncio
import sys
import os
import logging
from pathlib import Path
from server import PromptServer # type: ignore
from .config import config
@@ -11,11 +10,13 @@ from .routes.recipe_routes import RecipeRoutes
from .routes.stats_routes import StatsRoutes
from .routes.update_routes import UpdateRoutes
from .routes.misc_routes import MiscRoutes
from .routes.preview_routes import PreviewRoutes
from .routes.example_images_routes import ExampleImagesRoutes
from .services.service_registry import ServiceRegistry
from .services.settings_manager import settings
from .utils.example_images_migration import ExampleImagesMigration
from .services.websocket_manager import ws_manager
from .services.example_images_cleanup_service import ExampleImagesCleanupService
logger = logging.getLogger(__name__)
@@ -49,102 +50,12 @@ class LoraManager:
asyncio_logger = logging.getLogger("asyncio")
asyncio_logger.addFilter(ConnectionResetFilter())
added_targets = set() # Track already added target paths
# Add static route for example images if the path exists in settings
example_images_path = settings.get('example_images_path')
logger.info(f"Example images path: {example_images_path}")
if example_images_path and os.path.exists(example_images_path):
app.router.add_static('/example_images_static', example_images_path)
logger.info(f"Added static route for example images: /example_images_static -> {example_images_path}")
# Add static routes for each lora root
for idx, root in enumerate(config.loras_roots, start=1):
preview_path = f'/loras_static/root{idx}/preview'
real_root = root
if root in config._path_mappings.values():
for target, link in config._path_mappings.items():
if link == root:
real_root = target
break
# Add static route for original path
app.router.add_static(preview_path, real_root)
logger.info(f"Added static route {preview_path} -> {real_root}")
# Record route mapping
config.add_route_mapping(real_root, preview_path)
added_targets.add(real_root)
# Add static routes for each checkpoint root
for idx, root in enumerate(config.base_models_roots, start=1):
preview_path = f'/checkpoints_static/root{idx}/preview'
real_root = root
if root in config._path_mappings.values():
for target, link in config._path_mappings.items():
if link == root:
real_root = target
break
# Add static route for original path
app.router.add_static(preview_path, real_root)
logger.info(f"Added static route {preview_path} -> {real_root}")
# Record route mapping
config.add_route_mapping(real_root, preview_path)
added_targets.add(real_root)
# Add static routes for each embedding root
for idx, root in enumerate(config.embeddings_roots, start=1):
preview_path = f'/embeddings_static/root{idx}/preview'
real_root = root
if root in config._path_mappings.values():
for target, link in config._path_mappings.items():
if link == root:
real_root = target
break
# Add static route for original path
app.router.add_static(preview_path, real_root)
logger.info(f"Added static route {preview_path} -> {real_root}")
# Record route mapping
config.add_route_mapping(real_root, preview_path)
added_targets.add(real_root)
# Add static routes for symlink target paths
link_idx = {
'lora': 1,
'checkpoint': 1,
'embedding': 1
}
for target_path, link_path in config._path_mappings.items():
if target_path not in added_targets:
# Determine if this is a checkpoint, lora, or embedding link based on path
is_checkpoint = any(cp_root in link_path for cp_root in config.base_models_roots)
is_checkpoint = is_checkpoint or any(cp_root in target_path for cp_root in config.base_models_roots)
is_embedding = any(emb_root in link_path for emb_root in config.embeddings_roots)
is_embedding = is_embedding or any(emb_root in target_path for emb_root in config.embeddings_roots)
if is_checkpoint:
route_path = f'/checkpoints_static/link_{link_idx["checkpoint"]}/preview'
link_idx["checkpoint"] += 1
elif is_embedding:
route_path = f'/embeddings_static/link_{link_idx["embedding"]}/preview'
link_idx["embedding"] += 1
else:
route_path = f'/loras_static/link_{link_idx["lora"]}/preview'
link_idx["lora"] += 1
try:
app.router.add_static(route_path, Path(target_path).resolve(strict=False))
logger.info(f"Added static route for link target {route_path} -> {target_path}")
config.add_route_mapping(target_path, route_path)
added_targets.add(target_path)
except Exception as e:
logger.warning(f"Failed to add static route on initialization for {target_path}: {e}")
continue
# Add static route for locales JSON files
if os.path.exists(config.i18n_path):
@@ -166,7 +77,8 @@ class LoraManager:
RecipeRoutes.setup_routes(app)
UpdateRoutes.setup_routes(app)
MiscRoutes.setup_routes(app)
ExampleImagesRoutes.setup_routes(app)
ExampleImagesRoutes.setup_routes(app, ws_manager=ws_manager)
PreviewRoutes.setup_routes(app)
# Setup WebSocket routes that are shared across all model types
app.router.add_get('/ws/fetch-progress', ws_manager.handle_connection)
@@ -240,7 +152,6 @@ class LoraManager:
# Run post-initialization tasks
post_tasks = [
asyncio.create_task(cls._cleanup_backup_files(), name='cleanup_bak_files'),
asyncio.create_task(cls._cleanup_example_images_folders(), name='cleanup_example_images'),
# Add more post-initialization tasks here as needed
# asyncio.create_task(cls._another_post_task(), name='another_task'),
]
@@ -352,116 +263,37 @@ class LoraManager:
@classmethod
async def _cleanup_example_images_folders(cls):
"""Clean up invalid or empty folders in example images directory"""
"""Invoke the example images cleanup service for manual execution."""
try:
example_images_path = settings.get('example_images_path')
if not example_images_path or not os.path.exists(example_images_path):
logger.debug("Example images path not configured or doesn't exist, skipping cleanup")
return
logger.debug(f"Starting cleanup of example images folders in: {example_images_path}")
# Get all scanner instances to check hash validity
lora_scanner = await ServiceRegistry.get_lora_scanner()
checkpoint_scanner = await ServiceRegistry.get_checkpoint_scanner()
embedding_scanner = await ServiceRegistry.get_embedding_scanner()
total_folders_checked = 0
empty_folders_removed = 0
orphaned_folders_removed = 0
# Scan the example images directory
try:
with os.scandir(example_images_path) as it:
for entry in it:
if not entry.is_dir(follow_symlinks=False):
continue
folder_name = entry.name
folder_path = entry.path
total_folders_checked += 1
try:
# Check if folder is empty
is_empty = cls._is_folder_empty(folder_path)
if is_empty:
logger.debug(f"Removing empty example images folder: {folder_name}")
await cls._remove_folder_safely(folder_path)
empty_folders_removed += 1
continue
# Check if folder name is a valid SHA256 hash (64 hex characters)
if len(folder_name) != 64 or not all(c in '0123456789abcdefABCDEF' for c in folder_name):
# Skip non-hash folders to avoid deleting other content
logger.debug(f"Skipping non-hash folder: {folder_name}")
continue
# Check if hash exists in any of the scanners
hash_exists = (
lora_scanner.has_hash(folder_name) or
checkpoint_scanner.has_hash(folder_name) or
embedding_scanner.has_hash(folder_name)
)
if not hash_exists:
logger.debug(f"Removing example images folder for deleted model: {folder_name}")
await cls._remove_folder_safely(folder_path)
orphaned_folders_removed += 1
continue
service = ExampleImagesCleanupService()
result = await service.cleanup_example_image_folders()
except Exception as e:
logger.error(f"Error processing example images folder {folder_name}: {e}")
# Yield control periodically
await asyncio.sleep(0.01)
except Exception as e:
logger.error(f"Error scanning example images directory: {e}")
return
# Log final cleanup report
total_removed = empty_folders_removed + orphaned_folders_removed
if total_removed > 0:
logger.info(f"Example images cleanup completed: checked {total_folders_checked} folders, "
f"removed {empty_folders_removed} empty folders and {orphaned_folders_removed} "
f"folders for deleted models (total: {total_removed} removed)")
if result.get('success'):
logger.debug(
"Manual example images cleanup completed: moved=%s",
result.get('moved_total'),
)
elif result.get('partial_success'):
logger.warning(
"Manual example images cleanup partially succeeded: moved=%s failures=%s",
result.get('moved_total'),
result.get('move_failures'),
)
else:
logger.debug(f"Example images cleanup completed: checked {total_folders_checked} folders, "
f"no cleanup needed")
except Exception as e:
logger.debug(
"Manual example images cleanup skipped or failed: %s",
result.get('error', 'no changes'),
)
return result
except Exception as e: # pragma: no cover - defensive guard
logger.error(f"Error during example images cleanup: {e}", exc_info=True)
@classmethod
def _is_folder_empty(cls, folder_path: str) -> bool:
"""Check if a folder is empty
Args:
folder_path: Path to the folder to check
Returns:
bool: True if folder is empty, False otherwise
"""
try:
with os.scandir(folder_path) as it:
return not any(it)
except Exception as e:
logger.debug(f"Error checking if folder is empty {folder_path}: {e}")
return False
@classmethod
async def _remove_folder_safely(cls, folder_path: str):
"""Safely remove a folder and all its contents
Args:
folder_path: Path to the folder to remove
"""
try:
import shutil
loop = asyncio.get_event_loop()
await loop.run_in_executor(None, shutil.rmtree, folder_path)
except Exception as e:
logger.warning(f"Failed to remove folder {folder_path}: {e}")
return {
'success': False,
'error': str(e),
'error_code': 'unexpected_error',
}
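# A minimal sketch of consuming this result contract elsewhere (keys as used
# above: success, partial_success, moved_total, move_failures, error):
#     service = ExampleImagesCleanupService()
#     result = await service.cleanup_example_image_folders()
#     if result.get("success"):
#         logger.info("moved %s folders", result.get("moved_total"))
#     elif result.get("partial_success"):
#         logger.warning("%s folders failed to move", result.get("move_failures"))
#     else:
#         logger.error("cleanup failed: %s", result.get("error"))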
@classmethod
async def _cleanup(cls, app):

View File

@@ -1,9 +1,7 @@
import os
import importlib
import sys
# Check if running in standalone mode
standalone_mode = 'nodes' not in sys.modules
standalone_mode = os.environ.get("LORA_MANAGER_STANDALONE", "0") == "1"
if not standalone_mode:
from .metadata_hook import MetadataHook
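
The same flag now gates every optional ComfyUI integration. A minimal sketch of opting in from a hypothetical standalone launcher (the flag is read at import time, so it must be set before the package is imported):

import os

os.environ["LORA_MANAGER_STANDALONE"] = "1"  # must precede the first import

# Any module using the pattern above now sees standalone mode enabled.
assert os.environ.get("LORA_MANAGER_STANDALONE", "0") == "1"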

View File

@@ -1,9 +1,9 @@
import json
import sys
import os
from .constants import IMAGES
# Check if running in standalone mode
standalone_mode = 'nodes' not in sys.modules
standalone_mode = os.environ.get("LORA_MANAGER_STANDALONE", "0") == "1"
from .constants import MODELS, PROMPTS, SAMPLING, LORAS, SIZE, IS_SAMPLER

View File

@@ -284,7 +284,59 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):
logger.error(f"Error fetching Civitai info for model ID {version_id}: {e}")
result["loras"].append(lora_entry)
# If we found LoRA hashes in the metadata but haven't already
# populated entries for them, fall back to creating LoRAs from
# the hashes section. Some Civitai image responses only include
# LoRA information here without explicit resources entries.
for lora_name, lora_hash in lora_hashes.items():
if not lora_hash:
continue
# Skip LoRAs we've already added via resources or other fields
if lora_hash in added_loras:
continue
lora_entry = {
'name': lora_name,
'type': "lora",
'weight': 1.0,
'hash': lora_hash,
'existsLocally': False,
'localPath': None,
'file_name': lora_name,
'thumbnailUrl': '/loras_static/images/no-preview.png',
'baseModel': '',
'size': 0,
'downloadUrl': '',
'isDeleted': False
}
if metadata_provider:
try:
civitai_info = await metadata_provider.get_model_by_hash(lora_hash)
populated_entry = await self.populate_lora_from_civitai(
lora_entry,
civitai_info,
recipe_scanner,
base_model_counts,
lora_hash
)
if populated_entry is None:
continue
lora_entry = populated_entry
if 'id' in lora_entry and lora_entry['id']:
added_loras[str(lora_entry['id'])] = len(result["loras"])
except Exception as e:
logger.error(f"Error fetching Civitai info for LoRA hash {lora_hash}: {e}")
added_loras[lora_hash] = len(result["loras"])
result["loras"].append(lora_entry)
# Check for LoRA info in the format "Lora_0 Model hash", "Lora_0 Model name", etc.
lora_index = 0
while f"Lora_{lora_index} Model hash" in metadata and f"Lora_{lora_index} Model name" in metadata:

File diff suppressed because it is too large

View File

@@ -0,0 +1,217 @@
"""Base infrastructure shared across recipe routes."""
from __future__ import annotations
import logging
import os
from typing import Callable, Mapping
import jinja2
from aiohttp import web
from ..config import config
from ..recipes import RecipeParserFactory
from ..services.downloader import get_downloader
from ..services.recipes import (
RecipeAnalysisService,
RecipePersistenceService,
RecipeSharingService,
)
from ..services.server_i18n import server_i18n
from ..services.service_registry import ServiceRegistry
from ..services.settings_manager import settings
from ..utils.constants import CARD_PREVIEW_WIDTH
from ..utils.exif_utils import ExifUtils
from .handlers.recipe_handlers import (
RecipeAnalysisHandler,
RecipeHandlerSet,
RecipeListingHandler,
RecipeManagementHandler,
RecipePageView,
RecipeQueryHandler,
RecipeSharingHandler,
)
from .recipe_route_registrar import ROUTE_DEFINITIONS
logger = logging.getLogger(__name__)
class BaseRecipeRoutes:
"""Common dependency and startup wiring for recipe routes."""
_HANDLER_NAMES: tuple[str, ...] = tuple(
definition.handler_name for definition in ROUTE_DEFINITIONS
)
template_name: str = "recipes.html"
def __init__(self) -> None:
self.recipe_scanner = None
self.lora_scanner = None
self.civitai_client = None
self.settings = settings
self.server_i18n = server_i18n
self.template_env = jinja2.Environment(
loader=jinja2.FileSystemLoader(config.templates_path),
autoescape=True,
)
self._i18n_registered = False
self._startup_hooks_registered = False
self._handler_set: RecipeHandlerSet | None = None
self._handler_mapping: dict[str, Callable] | None = None
async def attach_dependencies(self, app: web.Application | None = None) -> None:
"""Resolve shared services from the registry."""
await self._ensure_services()
self._ensure_i18n_filter()
async def ensure_dependencies_ready(self) -> None:
"""Ensure dependencies are available for request handlers."""
if self.recipe_scanner is None or self.civitai_client is None:
await self.attach_dependencies()
def register_startup_hooks(self, app: web.Application) -> None:
"""Register startup hooks once for dependency wiring."""
if self._startup_hooks_registered:
return
app.on_startup.append(self.attach_dependencies)
app.on_startup.append(self.prewarm_cache)
self._startup_hooks_registered = True
async def prewarm_cache(self, app: web.Application | None = None) -> None:
"""Pre-load recipe and LoRA caches on startup."""
try:
await self.attach_dependencies(app)
if self.lora_scanner is not None:
await self.lora_scanner.get_cached_data()
hash_index = getattr(self.lora_scanner, "_hash_index", None)
if hash_index is not None and hasattr(hash_index, "_hash_to_path"):
_ = len(hash_index._hash_to_path)
if self.recipe_scanner is not None:
await self.recipe_scanner.get_cached_data(force_refresh=True)
except Exception as exc:
logger.error("Error pre-warming recipe cache: %s", exc, exc_info=True)
def to_route_mapping(self) -> Mapping[str, Callable]:
"""Return a mapping of handler name to coroutine for registrar binding."""
if self._handler_mapping is None:
handler_set = self._create_handler_set()
self._handler_set = handler_set
self._handler_mapping = handler_set.to_route_mapping()
return self._handler_mapping
# Internal helpers -------------------------------------------------
async def _ensure_services(self) -> None:
if self.recipe_scanner is None:
self.recipe_scanner = await ServiceRegistry.get_recipe_scanner()
self.lora_scanner = getattr(self.recipe_scanner, "_lora_scanner", None)
if self.civitai_client is None:
self.civitai_client = await ServiceRegistry.get_civitai_client()
def _ensure_i18n_filter(self) -> None:
if not self._i18n_registered:
self.template_env.filters["t"] = self.server_i18n.create_template_filter()
self._i18n_registered = True
def get_handler_owner(self):
"""Return the object supplying bound handler coroutines."""
if self._handler_set is None:
self._handler_set = self._create_handler_set()
return self._handler_set
def _create_handler_set(self) -> RecipeHandlerSet:
recipe_scanner_getter = lambda: self.recipe_scanner
civitai_client_getter = lambda: self.civitai_client
standalone_mode = os.environ.get("LORA_MANAGER_STANDALONE", "0") == "1"
if not standalone_mode:
from ..metadata_collector import get_metadata # type: ignore[import-not-found]
from ..metadata_collector.metadata_processor import ( # type: ignore[import-not-found]
MetadataProcessor,
)
from ..metadata_collector.metadata_registry import ( # type: ignore[import-not-found]
MetadataRegistry,
)
else: # pragma: no cover - optional dependency path
get_metadata = None # type: ignore[assignment]
MetadataProcessor = None # type: ignore[assignment]
MetadataRegistry = None # type: ignore[assignment]
analysis_service = RecipeAnalysisService(
exif_utils=ExifUtils,
recipe_parser_factory=RecipeParserFactory,
downloader_factory=get_downloader,
metadata_collector=get_metadata,
metadata_processor_cls=MetadataProcessor,
metadata_registry_cls=MetadataRegistry,
standalone_mode=standalone_mode,
logger=logger,
)
persistence_service = RecipePersistenceService(
exif_utils=ExifUtils,
card_preview_width=CARD_PREVIEW_WIDTH,
logger=logger,
)
sharing_service = RecipeSharingService(logger=logger)
page_view = RecipePageView(
ensure_dependencies_ready=self.ensure_dependencies_ready,
settings_service=self.settings,
server_i18n=self.server_i18n,
template_env=self.template_env,
template_name=self.template_name,
recipe_scanner_getter=recipe_scanner_getter,
logger=logger,
)
listing = RecipeListingHandler(
ensure_dependencies_ready=self.ensure_dependencies_ready,
recipe_scanner_getter=recipe_scanner_getter,
logger=logger,
)
query = RecipeQueryHandler(
ensure_dependencies_ready=self.ensure_dependencies_ready,
recipe_scanner_getter=recipe_scanner_getter,
format_recipe_file_url=listing.format_recipe_file_url,
logger=logger,
)
management = RecipeManagementHandler(
ensure_dependencies_ready=self.ensure_dependencies_ready,
recipe_scanner_getter=recipe_scanner_getter,
logger=logger,
persistence_service=persistence_service,
analysis_service=analysis_service,
)
analysis = RecipeAnalysisHandler(
ensure_dependencies_ready=self.ensure_dependencies_ready,
recipe_scanner_getter=recipe_scanner_getter,
civitai_client_getter=civitai_client_getter,
logger=logger,
analysis_service=analysis_service,
)
sharing = RecipeSharingHandler(
ensure_dependencies_ready=self.ensure_dependencies_ready,
recipe_scanner_getter=recipe_scanner_getter,
logger=logger,
sharing_service=sharing_service,
)
return RecipeHandlerSet(
page_view=page_view,
listing=listing,
query=query,
management=management,
analysis=analysis,
sharing=sharing,
)
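
A minimal sketch of wiring this base class into an aiohttp application (the setup function is an assumption, and RouteDefinition is assumed to expose method and path fields like the example-images registrar later in this diff):

from aiohttp import web

from .base_recipe_routes import BaseRecipeRoutes
from .recipe_route_registrar import ROUTE_DEFINITIONS

def setup_recipe_routes(app: web.Application) -> None:
    routes = BaseRecipeRoutes()
    routes.register_startup_hooks(app)   # attach_dependencies + prewarm_cache
    mapping = routes.to_route_mapping()  # handler name -> bound coroutine
    for definition in ROUTE_DEFINITIONS:
        app.router.add_route(definition.method, definition.path, mapping[definition.handler_name])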

View File

@@ -2,9 +2,9 @@ import logging
from aiohttp import web
from .base_model_routes import BaseModelRoutes
from .model_route_registrar import ModelRouteRegistrar
from ..services.checkpoint_service import CheckpointService
from ..services.service_registry import ServiceRegistry
from ..services.metadata_service import get_default_metadata_provider
from ..config import config
logger = logging.getLogger(__name__)
@@ -14,8 +14,7 @@ class CheckpointRoutes(BaseModelRoutes):
def __init__(self):
"""Initialize Checkpoint routes with Checkpoint service"""
# Service will be initialized later via setup_routes
self.service = None
super().__init__()
self.template_name = "checkpoints.html"
async def initialize_services(self):
@@ -23,8 +22,8 @@ class CheckpointRoutes(BaseModelRoutes):
checkpoint_scanner = await ServiceRegistry.get_checkpoint_scanner()
self.service = CheckpointService(checkpoint_scanner)
# Initialize parent with the service
super().__init__(self.service)
# Attach service dependencies
self.attach_service(self.service)
def setup_routes(self, app: web.Application):
"""Setup Checkpoint routes"""
@@ -34,14 +33,14 @@ class CheckpointRoutes(BaseModelRoutes):
# Setup common routes with 'checkpoints' prefix (includes page route)
super().setup_routes(app, 'checkpoints')
def setup_specific_routes(self, app: web.Application, prefix: str):
def setup_specific_routes(self, registrar: ModelRouteRegistrar, prefix: str):
"""Setup Checkpoint-specific routes"""
# Checkpoint info by name
app.router.add_get(f'/api/lm/{prefix}/info/{{name}}', self.get_checkpoint_info)
registrar.add_prefixed_route('GET', '/api/lm/{prefix}/info/{name}', prefix, self.get_checkpoint_info)
# Checkpoint roots and Unet roots
app.router.add_get(f'/api/lm/{prefix}/checkpoints_roots', self.get_checkpoints_roots)
app.router.add_get(f'/api/lm/{prefix}/unet_roots', self.get_unet_roots)
registrar.add_prefixed_route('GET', '/api/lm/{prefix}/checkpoints_roots', prefix, self.get_checkpoints_roots)
registrar.add_prefixed_route('GET', '/api/lm/{prefix}/unet_roots', prefix, self.get_unet_roots)
def _validate_civitai_model_type(self, model_type: str) -> bool:
"""Validate CivitAI model type for Checkpoint"""

View File

@@ -2,9 +2,9 @@ import logging
from aiohttp import web
from .base_model_routes import BaseModelRoutes
from .model_route_registrar import ModelRouteRegistrar
from ..services.embedding_service import EmbeddingService
from ..services.service_registry import ServiceRegistry
from ..services.metadata_service import get_default_metadata_provider
logger = logging.getLogger(__name__)
@@ -13,8 +13,7 @@ class EmbeddingRoutes(BaseModelRoutes):
def __init__(self):
"""Initialize Embedding routes with Embedding service"""
# Service will be initialized later via setup_routes
self.service = None
super().__init__()
self.template_name = "embeddings.html"
async def initialize_services(self):
@@ -22,8 +21,8 @@ class EmbeddingRoutes(BaseModelRoutes):
embedding_scanner = await ServiceRegistry.get_embedding_scanner()
self.service = EmbeddingService(embedding_scanner)
# Initialize parent with the service
super().__init__(self.service)
# Attach service dependencies
self.attach_service(self.service)
def setup_routes(self, app: web.Application):
"""Setup Embedding routes"""
@@ -33,10 +32,10 @@ class EmbeddingRoutes(BaseModelRoutes):
# Setup common routes with 'embeddings' prefix (includes page route)
super().setup_routes(app, 'embeddings')
def setup_specific_routes(self, app: web.Application, prefix: str):
def setup_specific_routes(self, registrar: ModelRouteRegistrar, prefix: str):
"""Setup Embedding-specific routes"""
# Embedding info by name
app.router.add_get(f'/api/lm/{prefix}/info/{{name}}', self.get_embedding_info)
registrar.add_prefixed_route('GET', '/api/lm/{prefix}/info/{name}', prefix, self.get_embedding_info)
def _validate_civitai_model_type(self, model_type: str) -> bool:
"""Validate CivitAI model type for Embedding"""

View File

@@ -0,0 +1,62 @@
"""Route registrar for example image endpoints."""
from __future__ import annotations
from dataclasses import dataclass
from typing import Callable, Iterable, Mapping
from aiohttp import web
@dataclass(frozen=True)
class RouteDefinition:
"""Declarative configuration for a HTTP route."""
method: str
path: str
handler_name: str
ROUTE_DEFINITIONS: tuple[RouteDefinition, ...] = (
RouteDefinition("POST", "/api/lm/download-example-images", "download_example_images"),
RouteDefinition("POST", "/api/lm/import-example-images", "import_example_images"),
RouteDefinition("GET", "/api/lm/example-images-status", "get_example_images_status"),
RouteDefinition("POST", "/api/lm/pause-example-images", "pause_example_images"),
RouteDefinition("POST", "/api/lm/resume-example-images", "resume_example_images"),
RouteDefinition("POST", "/api/lm/open-example-images-folder", "open_example_images_folder"),
RouteDefinition("GET", "/api/lm/example-image-files", "get_example_image_files"),
RouteDefinition("GET", "/api/lm/has-example-images", "has_example_images"),
RouteDefinition("POST", "/api/lm/delete-example-image", "delete_example_image"),
RouteDefinition("POST", "/api/lm/force-download-example-images", "force_download_example_images"),
RouteDefinition("POST", "/api/lm/cleanup-example-image-folders", "cleanup_example_image_folders"),
)
class ExampleImagesRouteRegistrar:
"""Bind declarative example image routes to an aiohttp router."""
_METHOD_MAP = {
"GET": "add_get",
"POST": "add_post",
"PUT": "add_put",
"DELETE": "add_delete",
}
def __init__(self, app: web.Application) -> None:
self._app = app
def register_routes(
self,
handler_lookup: Mapping[str, Callable[[web.Request], object]],
*,
definitions: Iterable[RouteDefinition] = ROUTE_DEFINITIONS,
) -> None:
"""Register each route definition using the supplied handlers."""
for definition in definitions:
handler = handler_lookup[definition.handler_name]
self._bind_route(definition.method, definition.path, handler)
def _bind_route(self, method: str, path: str, handler: Callable[[web.Request], object]) -> None:
add_method_name = self._METHOD_MAP[method.upper()]
add_method = getattr(self._app.router, add_method_name)
add_method(path, handler)
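
A minimal usage sketch, binding every declared route to a placeholder handler (the handler itself is illustrative):

from aiohttp import web

from .example_images_route_registrar import (
    ROUTE_DEFINITIONS,
    ExampleImagesRouteRegistrar,
)

async def noop(request: web.Request) -> web.Response:
    return web.json_response({"success": True})  # placeholder for illustration

app = web.Application()
registrar = ExampleImagesRouteRegistrar(app)
registrar.register_routes({d.handler_name: noop for d in ROUTE_DEFINITIONS})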

View File

@@ -1,74 +1,88 @@
from __future__ import annotations
import logging
from ..utils.example_images_download_manager import DownloadManager
from ..utils.example_images_processor import ExampleImagesProcessor
from typing import Callable, Mapping
from aiohttp import web
from .example_images_route_registrar import ExampleImagesRouteRegistrar
from .handlers.example_images_handlers import (
ExampleImagesDownloadHandler,
ExampleImagesFileHandler,
ExampleImagesHandlerSet,
ExampleImagesManagementHandler,
)
from ..services.use_cases.example_images import (
DownloadExampleImagesUseCase,
ImportExampleImagesUseCase,
)
from ..utils.example_images_download_manager import (
DownloadManager,
get_default_download_manager,
)
from ..utils.example_images_file_manager import ExampleImagesFileManager
from ..services.websocket_manager import ws_manager
from ..utils.example_images_processor import ExampleImagesProcessor
from ..services.example_images_cleanup_service import ExampleImagesCleanupService
logger = logging.getLogger(__name__)
class ExampleImagesRoutes:
"""Routes for example images related functionality"""
@staticmethod
def setup_routes(app):
"""Register example images routes"""
app.router.add_post('/api/lm/download-example-images', ExampleImagesRoutes.download_example_images)
app.router.add_post('/api/lm/import-example-images', ExampleImagesRoutes.import_example_images)
app.router.add_get('/api/lm/example-images-status', ExampleImagesRoutes.get_example_images_status)
app.router.add_post('/api/lm/pause-example-images', ExampleImagesRoutes.pause_example_images)
app.router.add_post('/api/lm/resume-example-images', ExampleImagesRoutes.resume_example_images)
app.router.add_post('/api/lm/open-example-images-folder', ExampleImagesRoutes.open_example_images_folder)
app.router.add_get('/api/lm/example-image-files', ExampleImagesRoutes.get_example_image_files)
app.router.add_get('/api/lm/has-example-images', ExampleImagesRoutes.has_example_images)
app.router.add_post('/api/lm/delete-example-image', ExampleImagesRoutes.delete_example_image)
app.router.add_post('/api/lm/force-download-example-images', ExampleImagesRoutes.force_download_example_images)
"""Route controller for example image endpoints."""
@staticmethod
async def download_example_images(request):
"""Download example images for models from Civitai"""
return await DownloadManager.start_download(request)
def __init__(
self,
*,
ws_manager,
download_manager: DownloadManager | None = None,
processor=ExampleImagesProcessor,
file_manager=ExampleImagesFileManager,
cleanup_service: ExampleImagesCleanupService | None = None,
) -> None:
if ws_manager is None:
raise ValueError("ws_manager is required")
self._download_manager = download_manager or get_default_download_manager(ws_manager)
self._processor = processor
self._file_manager = file_manager
self._cleanup_service = cleanup_service or ExampleImagesCleanupService()
self._handler_set: ExampleImagesHandlerSet | None = None
self._handler_mapping: Mapping[str, Callable[[web.Request], web.StreamResponse]] | None = None
@staticmethod
async def get_example_images_status(request):
"""Get the current status of example images download"""
return await DownloadManager.get_status(request)
@classmethod
def setup_routes(cls, app: web.Application, *, ws_manager) -> None:
"""Register routes on the given aiohttp application using default wiring."""
@staticmethod
async def pause_example_images(request):
"""Pause the example images download"""
return await DownloadManager.pause_download(request)
controller = cls(ws_manager=ws_manager)
controller.register(app)
@staticmethod
async def resume_example_images(request):
"""Resume the example images download"""
return await DownloadManager.resume_download(request)
@staticmethod
async def open_example_images_folder(request):
"""Open the example images folder for a specific model"""
return await ExampleImagesFileManager.open_folder(request)
def register(self, app: web.Application) -> None:
"""Bind the controller's handlers to the aiohttp router."""
@staticmethod
async def get_example_image_files(request):
"""Get list of example image files for a specific model"""
return await ExampleImagesFileManager.get_files(request)
registrar = ExampleImagesRouteRegistrar(app)
registrar.register_routes(self.to_route_mapping())
@staticmethod
async def import_example_images(request):
"""Import local example images for a model"""
return await ExampleImagesProcessor.import_images(request)
@staticmethod
async def has_example_images(request):
"""Check if example images folder exists and is not empty for a model"""
return await ExampleImagesFileManager.has_images(request)
def to_route_mapping(self) -> Mapping[str, Callable[[web.Request], web.StreamResponse]]:
"""Return the registrar-compatible mapping of handler names to callables."""
@staticmethod
async def delete_example_image(request):
"""Delete a custom example image for a model"""
return await ExampleImagesProcessor.delete_custom_image(request)
if self._handler_mapping is None:
handler_set = self._build_handler_set()
self._handler_set = handler_set
self._handler_mapping = handler_set.to_route_mapping()
return self._handler_mapping
@staticmethod
async def force_download_example_images(request):
"""Force download example images for specific models"""
return await DownloadManager.start_force_download(request)
def _build_handler_set(self) -> ExampleImagesHandlerSet:
logger.debug("Building ExampleImagesHandlerSet with %s, %s, %s", self._download_manager, self._processor, self._file_manager)
download_use_case = DownloadExampleImagesUseCase(download_manager=self._download_manager)
download_handler = ExampleImagesDownloadHandler(download_use_case, self._download_manager)
import_use_case = ImportExampleImagesUseCase(processor=self._processor)
management_handler = ExampleImagesManagementHandler(
import_use_case,
self._processor,
self._cleanup_service,
)
file_handler = ExampleImagesFileHandler(self._file_manager)
return ExampleImagesHandlerSet(
download=download_handler,
management=management_handler,
files=file_handler,
)
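
A minimal sketch of both wiring styles the controller supports (the fake objects in the second variant are illustrative stand-ins for tests):

from aiohttp import web

from .example_images_routes import ExampleImagesRoutes
from ..services.websocket_manager import ws_manager

app = web.Application()

# Default wiring, as done in LoraManager.setup_routes:
ExampleImagesRoutes.setup_routes(app, ws_manager=ws_manager)

# Or inject doubles explicitly, e.g. in a test:
# controller = ExampleImagesRoutes(
#     ws_manager=FakeWsManager(),              # hypothetical test double
#     download_manager=FakeDownloadManager(),  # hypothetical test double
# )
# controller.register(app)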

View File

@@ -0,0 +1,159 @@
"""Handler set for example image routes."""
from __future__ import annotations
from dataclasses import dataclass
from typing import Callable, Mapping
from aiohttp import web
from ...services.use_cases.example_images import (
DownloadExampleImagesConfigurationError,
DownloadExampleImagesInProgressError,
DownloadExampleImagesUseCase,
ImportExampleImagesUseCase,
ImportExampleImagesValidationError,
)
from ...utils.example_images_download_manager import (
DownloadConfigurationError,
DownloadInProgressError,
DownloadNotRunningError,
ExampleImagesDownloadError,
)
from ...utils.example_images_processor import ExampleImagesImportError
class ExampleImagesDownloadHandler:
"""HTTP adapters for download-related example image endpoints."""
def __init__(
self,
download_use_case: DownloadExampleImagesUseCase,
download_manager,
) -> None:
self._download_use_case = download_use_case
self._download_manager = download_manager
async def download_example_images(self, request: web.Request) -> web.StreamResponse:
try:
payload = await request.json()
result = await self._download_use_case.execute(payload)
return web.json_response(result)
except DownloadExampleImagesInProgressError as exc:
response = {
'success': False,
'error': str(exc),
'status': exc.progress,
}
return web.json_response(response, status=400)
except DownloadExampleImagesConfigurationError as exc:
return web.json_response({'success': False, 'error': str(exc)}, status=400)
except ExampleImagesDownloadError as exc:
return web.json_response({'success': False, 'error': str(exc)}, status=500)
async def get_example_images_status(self, request: web.Request) -> web.StreamResponse:
result = await self._download_manager.get_status(request)
return web.json_response(result)
async def pause_example_images(self, request: web.Request) -> web.StreamResponse:
try:
result = await self._download_manager.pause_download(request)
return web.json_response(result)
except DownloadNotRunningError as exc:
return web.json_response({'success': False, 'error': str(exc)}, status=400)
async def resume_example_images(self, request: web.Request) -> web.StreamResponse:
try:
result = await self._download_manager.resume_download(request)
return web.json_response(result)
except DownloadNotRunningError as exc:
return web.json_response({'success': False, 'error': str(exc)}, status=400)
async def force_download_example_images(self, request: web.Request) -> web.StreamResponse:
try:
payload = await request.json()
result = await self._download_manager.start_force_download(payload)
return web.json_response(result)
except DownloadInProgressError as exc:
response = {
'success': False,
'error': str(exc),
'status': exc.progress_snapshot,
}
return web.json_response(response, status=400)
except DownloadConfigurationError as exc:
return web.json_response({'success': False, 'error': str(exc)}, status=400)
except ExampleImagesDownloadError as exc:
return web.json_response({'success': False, 'error': str(exc)}, status=500)
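
Taken together, the download handler's except clauses define a small status contract. A condensed view using the exception classes imported at the top of this module (illustrative only; the handler branches explicitly rather than consulting a map like this):

ERROR_STATUS = {
    DownloadExampleImagesInProgressError: 400,   # body also carries exc.progress
    DownloadExampleImagesConfigurationError: 400,
    DownloadInProgressError: 400,                # body also carries exc.progress_snapshot
    DownloadConfigurationError: 400,
    DownloadNotRunningError: 400,                # pause/resume with no active download
    ExampleImagesDownloadError: 500,             # unexpected download failure
}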
class ExampleImagesManagementHandler:
"""HTTP adapters for import/delete endpoints."""
def __init__(self, import_use_case: ImportExampleImagesUseCase, processor, cleanup_service) -> None:
self._import_use_case = import_use_case
self._processor = processor
self._cleanup_service = cleanup_service
async def import_example_images(self, request: web.Request) -> web.StreamResponse:
try:
result = await self._import_use_case.execute(request)
return web.json_response(result)
except ImportExampleImagesValidationError as exc:
return web.json_response({'success': False, 'error': str(exc)}, status=400)
except ExampleImagesImportError as exc:
return web.json_response({'success': False, 'error': str(exc)}, status=500)
async def delete_example_image(self, request: web.Request) -> web.StreamResponse:
return await self._processor.delete_custom_image(request)
async def cleanup_example_image_folders(self, request: web.Request) -> web.StreamResponse:
result = await self._cleanup_service.cleanup_example_image_folders()
if result.get('success') or result.get('partial_success'):
return web.json_response(result, status=200)
error_code = result.get('error_code')
status = 400 if error_code in {'path_not_configured', 'path_not_found'} else 500
return web.json_response(result, status=status)
class ExampleImagesFileHandler:
"""HTTP adapters for filesystem-centric endpoints."""
def __init__(self, file_manager) -> None:
self._file_manager = file_manager
async def open_example_images_folder(self, request: web.Request) -> web.StreamResponse:
return await self._file_manager.open_folder(request)
async def get_example_image_files(self, request: web.Request) -> web.StreamResponse:
return await self._file_manager.get_files(request)
async def has_example_images(self, request: web.Request) -> web.StreamResponse:
return await self._file_manager.has_images(request)
@dataclass(frozen=True)
class ExampleImagesHandlerSet:
"""Aggregate of handlers exposed to the registrar."""
download: ExampleImagesDownloadHandler
management: ExampleImagesManagementHandler
files: ExampleImagesFileHandler
def to_route_mapping(self) -> Mapping[str, Callable[[web.Request], web.StreamResponse]]:
"""Flatten handler methods into the registrar mapping."""
return {
"download_example_images": self.download.download_example_images,
"get_example_images_status": self.download.get_example_images_status,
"pause_example_images": self.download.pause_example_images,
"resume_example_images": self.download.resume_example_images,
"force_download_example_images": self.download.force_download_example_images,
"import_example_images": self.management.import_example_images,
"delete_example_image": self.management.delete_example_image,
"cleanup_example_image_folders": self.management.cleanup_example_image_folders,
"open_example_images_folder": self.files.open_example_images_folder,
"get_example_image_files": self.files.get_example_image_files,
"has_example_images": self.files.has_example_images,
}


@@ -0,0 +1,795 @@
"""Handlers for miscellaneous routes.
The legacy :mod:`py.routes.misc_routes` module bundled HTTP wiring and
business logic in a single class. This module mirrors the model route
architecture by splitting the responsibilities into dedicated handler
objects that can be composed by the route controller.
"""
from __future__ import annotations
import asyncio
import logging
import os
import subprocess
import sys
from dataclasses import dataclass
from typing import Awaitable, Callable, Dict, Mapping, Protocol
from aiohttp import web
from ...config import config
from ...services.metadata_service import (
get_metadata_archive_manager,
update_metadata_providers,
)
from ...services.service_registry import ServiceRegistry
from ...services.settings_manager import settings as default_settings
from ...services.websocket_manager import ws_manager
from ...services.downloader import get_downloader
from ...utils.constants import DEFAULT_NODE_COLOR, NODE_TYPES, SUPPORTED_MEDIA_EXTENSIONS
from ...utils.example_images_paths import is_valid_example_images_root
from ...utils.lora_metadata import extract_trained_words
from ...utils.usage_stats import UsageStats
logger = logging.getLogger(__name__)
class PromptServerProtocol(Protocol):
"""Subset of PromptServer used by the handlers."""
instance: "PromptServerProtocol"
def send_sync(self, event: str, payload: dict) -> None: # pragma: no cover - protocol
...
class DownloaderProtocol(Protocol):
async def refresh_session(self) -> None: # pragma: no cover - protocol
...
class UsageStatsFactory(Protocol):
def __call__(self) -> UsageStats: # pragma: no cover - protocol
...
class MetadataProviderProtocol(Protocol):
async def get_model_versions(self, model_id: int) -> dict | None: # pragma: no cover - protocol
...
class MetadataArchiveManagerProtocol(Protocol):
async def download_and_extract_database(
self, progress_callback: Callable[[str, str], None]
) -> bool: # pragma: no cover - protocol
...
async def remove_database(self) -> bool: # pragma: no cover - protocol
...
def is_database_available(self) -> bool: # pragma: no cover - protocol
...
def get_database_path(self) -> str | None: # pragma: no cover - protocol
...
class NodeRegistry:
"""Thread-safe registry for tracking LoRA nodes in active workflows."""
def __init__(self) -> None:
self._lock = asyncio.Lock()
self._nodes: Dict[int, dict] = {}
self._registry_updated = asyncio.Event()
async def register_nodes(self, nodes: list[dict]) -> None:
async with self._lock:
self._nodes.clear()
for node in nodes:
node_id = node["node_id"]
node_type = node.get("type", "")
type_id = NODE_TYPES.get(node_type, 0)
bgcolor = node.get("bgcolor") or DEFAULT_NODE_COLOR
self._nodes[node_id] = {
"id": node_id,
"bgcolor": bgcolor,
"title": node.get("title"),
"type": type_id,
"type_name": node_type,
}
logger.debug("Registered %s nodes in registry", len(nodes))
self._registry_updated.set()
async def get_registry(self) -> dict:
async with self._lock:
return {
"nodes": dict(self._nodes),
"node_count": len(self._nodes),
}
async def wait_for_update(self, timeout: float = 1.0) -> bool:
self._registry_updated.clear()
try:
await asyncio.wait_for(self._registry_updated.wait(), timeout=timeout)
return True
except asyncio.TimeoutError:
return False
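
A usage sketch for NodeRegistry. The node "type" string below is hypothetical; it is looked up in NODE_TYPES (unknown types map to 0), and a missing bgcolor falls back to DEFAULT_NODE_COLOR:

import asyncio

async def demo() -> None:
    registry = NodeRegistry()
    await registry.register_nodes(
        [{"node_id": 3, "type": "Lora Loader (LoraManager)", "title": "My LoRA", "bgcolor": None}]
    )
    snapshot = await registry.get_registry()
    print(snapshot["node_count"])  # -> 1
    # wait_for_update() clears the event before waiting, so it only observes
    # registrations that start after the call -- this one times out:
    print(await registry.wait_for_update(timeout=0.1))  # -> False

asyncio.run(demo())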
class HealthCheckHandler:
async def health_check(self, request: web.Request) -> web.Response:
return web.json_response({"status": "ok"})
class SettingsHandler:
"""Sync settings between backend and frontend."""
_SYNC_KEYS = (
"civitai_api_key",
"default_lora_root",
"default_checkpoint_root",
"default_embedding_root",
"base_model_path_mappings",
"download_path_templates",
"enable_metadata_archive_db",
"language",
"proxy_enabled",
"proxy_type",
"proxy_host",
"proxy_port",
"proxy_username",
"proxy_password",
"example_images_path",
"optimize_example_images",
"auto_download_example_images",
"blur_mature_content",
"autoplay_on_hover",
"display_density",
"card_info_display",
"include_trigger_words",
"show_only_sfw",
"compact_mode",
)
_PROXY_KEYS = {"proxy_enabled", "proxy_host", "proxy_port", "proxy_username", "proxy_password", "proxy_type"}
def __init__(
self,
*,
settings_service=default_settings,
metadata_provider_updater: Callable[[], Awaitable[None]] = update_metadata_providers,
downloader_factory: Callable[[], Awaitable[DownloaderProtocol]] = get_downloader,
) -> None:
self._settings = settings_service
self._metadata_provider_updater = metadata_provider_updater
self._downloader_factory = downloader_factory
async def get_libraries(self, request: web.Request) -> web.Response:
"""Return the registered libraries and the active selection."""
try:
snapshot = config.get_library_registry_snapshot()
libraries = snapshot.get("libraries", {})
active_library = snapshot.get("active_library", "")
return web.json_response(
{
"success": True,
"libraries": libraries,
"active_library": active_library,
}
)
except Exception as exc: # pragma: no cover - defensive logging
logger.error("Error getting library registry: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
async def get_settings(self, request: web.Request) -> web.Response:
try:
response_data = {}
for key in self._SYNC_KEYS:
value = self._settings.get(key)
if value is not None:
response_data[key] = value
return web.json_response({"success": True, "settings": response_data})
except Exception as exc: # pragma: no cover - defensive logging
logger.error("Error getting settings: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
async def activate_library(self, request: web.Request) -> web.Response:
"""Activate the selected library."""
try:
data = await request.json()
except Exception as exc: # pragma: no cover - defensive logging
logger.error("Error parsing activate library request: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": "Invalid JSON payload"}, status=400)
library_name = data.get("library") or data.get("library_name")
if not isinstance(library_name, str) or not library_name.strip():
return web.json_response(
{"success": False, "error": "Library name is required"}, status=400
)
try:
normalized_name = library_name.strip()
self._settings.activate_library(normalized_name)
snapshot = config.get_library_registry_snapshot()
libraries = snapshot.get("libraries", {})
active_library = snapshot.get("active_library", "")
return web.json_response(
{
"success": True,
"active_library": active_library,
"libraries": libraries,
}
)
except KeyError as exc:
logger.debug("Attempted to activate unknown library '%s'", library_name)
return web.json_response({"success": False, "error": str(exc)}, status=404)
except Exception as exc: # pragma: no cover - defensive logging
logger.error("Error activating library '%s': %s", library_name, exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
async def update_settings(self, request: web.Request) -> web.Response:
try:
data = await request.json()
proxy_changed = False
for key, value in data.items():
if value == self._settings.get(key):
continue
if key == "example_images_path" and value:
validation_error = self._validate_example_images_path(value)
if validation_error:
return web.json_response({"success": False, "error": validation_error})
if value == "__DELETE__" and key in ("proxy_username", "proxy_password"):
self._settings.delete(key)
else:
self._settings.set(key, value)
if key == "enable_metadata_archive_db":
await self._metadata_provider_updater()
if key in self._PROXY_KEYS:
proxy_changed = True
if proxy_changed:
downloader = await self._downloader_factory()
await downloader.refresh_session()
return web.json_response({"success": True})
except Exception as exc: # pragma: no cover - defensive logging
logger.error("Error updating settings: %s", exc, exc_info=True)
return web.Response(status=500, text=str(exc))
def _validate_example_images_path(self, folder_path: str) -> str | None:
if not os.path.exists(folder_path):
return f"Path does not exist: {folder_path}"
if not os.path.isdir(folder_path):
return "Please set a dedicated folder for example images."
if not self._is_dedicated_example_images_folder(folder_path):
return "Please set a dedicated folder for example images."
return None
def _is_dedicated_example_images_folder(self, folder_path: str) -> bool:
return is_valid_example_images_root(folder_path)
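
An illustrative update_settings payload (the endpoint URL is assigned by the route registrar; the one shown here is only a placeholder). "__DELETE__" is the sentinel that removes a stored proxy credential instead of setting it, and touching any key in _PROXY_KEYS makes the handler refresh the downloader session after saving:

import json
import urllib.request

payload = {
    "proxy_enabled": True,
    "proxy_type": "http",
    "proxy_host": "127.0.0.1",
    "proxy_port": "8080",
    "proxy_password": "__DELETE__",  # delete the stored value
}
request = urllib.request.Request(
    "http://127.0.0.1:8188/api/lm/settings",  # placeholder path
    data=json.dumps(payload).encode("utf-8"),
    headers={"Content-Type": "application/json"},
    method="POST",
)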
class UsageStatsHandler:
def __init__(self, usage_stats_factory: UsageStatsFactory = UsageStats) -> None:
self._usage_stats_factory = usage_stats_factory
async def update_usage_stats(self, request: web.Request) -> web.Response:
try:
data = await request.json()
prompt_id = data.get("prompt_id")
if not prompt_id:
return web.json_response({"success": False, "error": "Missing prompt_id"}, status=400)
usage_stats = self._usage_stats_factory()
await usage_stats.process_execution(prompt_id)
return web.json_response({"success": True})
except Exception as exc: # pragma: no cover - defensive logging
logger.error("Failed to update usage stats: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
async def get_usage_stats(self, request: web.Request) -> web.Response:
try:
usage_stats = self._usage_stats_factory()
stats = await usage_stats.get_stats()
stats_response = {"success": True, "data": stats, "format_version": 2}
return web.json_response(stats_response)
except Exception as exc: # pragma: no cover - defensive logging
logger.error("Failed to get usage stats: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
class LoraCodeHandler:
def __init__(self, prompt_server: type[PromptServerProtocol]) -> None:
self._prompt_server = prompt_server
async def update_lora_code(self, request: web.Request) -> web.Response:
try:
data = await request.json()
node_ids = data.get("node_ids")
lora_code = data.get("lora_code", "")
mode = data.get("mode", "append")
if not lora_code:
return web.json_response({"success": False, "error": "Missing lora_code parameter"}, status=400)
results = []
if node_ids is None:
try:
self._prompt_server.instance.send_sync(
"lora_code_update", {"id": -1, "lora_code": lora_code, "mode": mode}
)
results.append({"node_id": "broadcast", "success": True})
except Exception as exc: # pragma: no cover - defensive logging
logger.error("Error broadcasting lora code: %s", exc)
results.append({"node_id": "broadcast", "success": False, "error": str(exc)})
else:
for node_id in node_ids:
try:
self._prompt_server.instance.send_sync(
"lora_code_update",
{"id": node_id, "lora_code": lora_code, "mode": mode},
)
results.append({"node_id": node_id, "success": True})
except Exception as exc: # pragma: no cover - defensive logging
logger.error("Error sending lora code to node %s: %s", node_id, exc)
results.append({"node_id": node_id, "success": False, "error": str(exc)})
return web.json_response({"success": True, "results": results})
except Exception as exc: # pragma: no cover - defensive logging
logger.error("Failed to update lora code: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
class TrainedWordsHandler:
async def get_trained_words(self, request: web.Request) -> web.Response:
try:
file_path = request.query.get("file_path")
if not file_path:
return web.json_response({"success": False, "error": "Missing file_path parameter"}, status=400)
if not os.path.exists(file_path):
return web.json_response({"success": False, "error": "File not found"}, status=404)
if not file_path.endswith(".safetensors"):
return web.json_response({"success": False, "error": "File must be a safetensors file"}, status=400)
trained_words, class_tokens = await extract_trained_words(file_path)
return web.json_response(
{
"success": True,
"trained_words": trained_words,
"class_tokens": class_tokens,
}
)
except Exception as exc: # pragma: no cover - defensive logging
logger.error("Failed to get trained words: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
class ModelExampleFilesHandler:
async def get_model_example_files(self, request: web.Request) -> web.Response:
try:
model_path = request.query.get("model_path")
if not model_path:
return web.json_response({"success": False, "error": "Missing model_path parameter"}, status=400)
model_dir = os.path.dirname(model_path)
if not os.path.exists(model_dir):
return web.json_response({"success": False, "error": "Model directory not found"}, status=404)
base_name = os.path.splitext(os.path.basename(model_path))[0]
files = []
pattern = f"{base_name}.example."
for file in os.listdir(model_dir):
if not file.startswith(pattern):
continue
file_full_path = os.path.join(model_dir, file)
if not os.path.isfile(file_full_path):
continue
file_ext = os.path.splitext(file)[1].lower()
if file_ext not in SUPPORTED_MEDIA_EXTENSIONS["images"] and file_ext not in SUPPORTED_MEDIA_EXTENSIONS["videos"]:
continue
try:
index = int(file[len(pattern) :].split(".")[0])
except (ValueError, IndexError):
index = float("inf")
static_url = config.get_preview_static_url(file_full_path)
files.append(
{
"name": file,
"path": static_url,
"extension": file_ext,
"is_video": file_ext in SUPPORTED_MEDIA_EXTENSIONS["videos"],
"index": index,
}
)
files.sort(key=lambda item: item["index"])
for file in files:
file.pop("index", None)
return web.json_response({"success": True, "files": files})
except Exception as exc: # pragma: no cover - defensive logging
logger.error("Failed to get model example files: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
@dataclass
class ServiceRegistryAdapter:
get_lora_scanner: Callable[[], Awaitable]
get_checkpoint_scanner: Callable[[], Awaitable]
get_embedding_scanner: Callable[[], Awaitable]
class ModelLibraryHandler:
def __init__(self, service_registry: ServiceRegistryAdapter, metadata_provider_factory: Callable[[], Awaitable[MetadataProviderProtocol | None]]) -> None:
self._service_registry = service_registry
self._metadata_provider_factory = metadata_provider_factory
async def check_model_exists(self, request: web.Request) -> web.Response:
try:
model_id_str = request.query.get("modelId")
model_version_id_str = request.query.get("modelVersionId")
if not model_id_str:
return web.json_response({"success": False, "error": "Missing required parameter: modelId"}, status=400)
try:
model_id = int(model_id_str)
except ValueError:
return web.json_response({"success": False, "error": "Parameter modelId must be an integer"}, status=400)
lora_scanner = await self._service_registry.get_lora_scanner()
checkpoint_scanner = await self._service_registry.get_checkpoint_scanner()
embedding_scanner = await self._service_registry.get_embedding_scanner()
if model_version_id_str:
try:
model_version_id = int(model_version_id_str)
except ValueError:
return web.json_response({"success": False, "error": "Parameter modelVersionId must be an integer"}, status=400)
exists = False
model_type = None
if await lora_scanner.check_model_version_exists(model_version_id):
exists = True
model_type = "lora"
elif checkpoint_scanner and await checkpoint_scanner.check_model_version_exists(model_version_id):
exists = True
model_type = "checkpoint"
elif embedding_scanner and await embedding_scanner.check_model_version_exists(model_version_id):
exists = True
model_type = "embedding"
return web.json_response({"success": True, "exists": exists, "modelType": model_type if exists else None})
lora_versions = await lora_scanner.get_model_versions_by_id(model_id)
checkpoint_versions = []
embedding_versions = []
if not lora_versions and checkpoint_scanner:
checkpoint_versions = await checkpoint_scanner.get_model_versions_by_id(model_id)
if not lora_versions and not checkpoint_versions and embedding_scanner:
embedding_versions = await embedding_scanner.get_model_versions_by_id(model_id)
model_type = None
versions = []
if lora_versions:
model_type = "lora"
versions = lora_versions
elif checkpoint_versions:
model_type = "checkpoint"
versions = checkpoint_versions
elif embedding_versions:
model_type = "embedding"
versions = embedding_versions
return web.json_response({"success": True, "modelType": model_type, "versions": versions})
except Exception as exc: # pragma: no cover - defensive logging
logger.error("Failed to check model existence: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
async def get_model_versions_status(self, request: web.Request) -> web.Response:
try:
model_id_str = request.query.get("modelId")
if not model_id_str:
return web.json_response({"success": False, "error": "Missing required parameter: modelId"}, status=400)
try:
model_id = int(model_id_str)
except ValueError:
return web.json_response({"success": False, "error": "Parameter modelId must be an integer"}, status=400)
metadata_provider = await self._metadata_provider_factory()
if not metadata_provider:
return web.json_response({"success": False, "error": "Metadata provider not available"}, status=503)
response = await metadata_provider.get_model_versions(model_id)
if not response or not response.get("modelVersions"):
return web.json_response({"success": False, "error": "Model not found"}, status=404)
versions = response.get("modelVersions", [])
model_name = response.get("name", "")
model_type = response.get("type", "").lower()
scanner = None
normalized_type = None
if model_type in {"lora", "locon", "dora"}:
scanner = await self._service_registry.get_lora_scanner()
normalized_type = "lora"
elif model_type == "checkpoint":
scanner = await self._service_registry.get_checkpoint_scanner()
normalized_type = "checkpoint"
elif model_type == "textualinversion":
scanner = await self._service_registry.get_embedding_scanner()
normalized_type = "embedding"
else:
return web.json_response({"success": False, "error": f'Model type "{model_type}" is not supported'}, status=400)
if not scanner:
return web.json_response({"success": False, "error": f'Scanner for type "{normalized_type}" is not available'}, status=503)
local_versions = await scanner.get_model_versions_by_id(model_id)
local_version_ids = {version["versionId"] for version in local_versions}
enriched_versions = []
for version in versions:
version_id = version.get("id")
enriched_versions.append(
{
"id": version_id,
"name": version.get("name", ""),
"thumbnailUrl": version.get("images")[0]["url"] if version.get("images") else None,
"inLibrary": version_id in local_version_ids,
}
)
return web.json_response(
{
"success": True,
"modelId": model_id,
"modelName": model_name,
"modelType": model_type,
"versions": enriched_versions,
}
)
except Exception as exc: # pragma: no cover - defensive logging
logger.error("Failed to get model versions status: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
class MetadataArchiveHandler:
def __init__(
self,
*,
metadata_archive_manager_factory: Callable[[], Awaitable[MetadataArchiveManagerProtocol]] = get_metadata_archive_manager,
settings_service=default_settings,
metadata_provider_updater: Callable[[], Awaitable[None]] = update_metadata_providers,
) -> None:
self._metadata_archive_manager_factory = metadata_archive_manager_factory
self._settings = settings_service
self._metadata_provider_updater = metadata_provider_updater
async def download_metadata_archive(self, request: web.Request) -> web.Response:
try:
archive_manager = await self._metadata_archive_manager_factory()
download_id = request.query.get("download_id")
def progress_callback(stage: str, message: str) -> None:
data = {"stage": stage, "message": message, "type": "metadata_archive_download"}
if download_id:
asyncio.create_task(ws_manager.broadcast_download_progress(download_id, data))
else:
asyncio.create_task(ws_manager.broadcast(data))
success = await archive_manager.download_and_extract_database(progress_callback)
if success:
self._settings.set("enable_metadata_archive_db", True)
await self._metadata_provider_updater()
return web.json_response({"success": True, "message": "Metadata archive database downloaded and extracted successfully"})
return web.json_response({"success": False, "error": "Failed to download and extract metadata archive database"}, status=500)
except Exception as exc: # pragma: no cover - defensive logging
logger.error("Error downloading metadata archive: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
async def remove_metadata_archive(self, request: web.Request) -> web.Response:
try:
archive_manager = await self._metadata_archive_manager_factory()
success = await archive_manager.remove_database()
if success:
self._settings.set("enable_metadata_archive_db", False)
await self._metadata_provider_updater()
return web.json_response({"success": True, "message": "Metadata archive database removed successfully"})
return web.json_response({"success": False, "error": "Failed to remove metadata archive database"}, status=500)
except Exception as exc: # pragma: no cover - defensive logging
logger.error("Error removing metadata archive: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
async def get_metadata_archive_status(self, request: web.Request) -> web.Response:
try:
archive_manager = await self._metadata_archive_manager_factory()
is_available = archive_manager.is_database_available()
is_enabled = self._settings.get("enable_metadata_archive_db", False)
db_size = 0
if is_available:
db_path = archive_manager.get_database_path()
if db_path and os.path.exists(db_path):
db_size = os.path.getsize(db_path)
return web.json_response(
{
"success": True,
"isAvailable": is_available,
"isEnabled": is_enabled,
"databaseSize": db_size,
"databasePath": archive_manager.get_database_path() if is_available else None,
}
)
except Exception as exc: # pragma: no cover - defensive logging
logger.error("Error getting metadata archive status: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
class FileSystemHandler:
async def open_file_location(self, request: web.Request) -> web.Response:
try:
data = await request.json()
file_path = data.get("file_path")
if not file_path:
return web.json_response({"success": False, "error": "Missing file_path parameter"}, status=400)
file_path = os.path.abspath(file_path)
if not os.path.isfile(file_path):
return web.json_response({"success": False, "error": "File does not exist"}, status=404)
if os.name == "nt":
subprocess.Popen(["explorer", "/select,", file_path])
elif os.name == "posix":
if sys.platform == "darwin":
subprocess.Popen(["open", "-R", file_path])
else:
folder = os.path.dirname(file_path)
subprocess.Popen(["xdg-open", folder])
return web.json_response({"success": True, "message": f"Opened folder and selected file: {file_path}"})
except Exception as exc: # pragma: no cover - defensive logging
logger.error("Failed to open file location: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
class NodeRegistryHandler:
def __init__(
self,
node_registry: NodeRegistry,
prompt_server: type[PromptServerProtocol],
*,
standalone_mode: bool,
) -> None:
self._node_registry = node_registry
self._prompt_server = prompt_server
self._standalone_mode = standalone_mode
async def register_nodes(self, request: web.Request) -> web.Response:
try:
data = await request.json()
nodes = data.get("nodes", [])
if not isinstance(nodes, list):
return web.json_response({"success": False, "error": "nodes must be a list"}, status=400)
for index, node in enumerate(nodes):
if not isinstance(node, dict):
return web.json_response({"success": False, "error": f"Node {index} must be an object"}, status=400)
node_id = node.get("node_id")
if node_id is None:
return web.json_response({"success": False, "error": f"Node {index} missing node_id parameter"}, status=400)
try:
node["node_id"] = int(node_id)
except (TypeError, ValueError):
return web.json_response({"success": False, "error": f"Node {index} node_id must be an integer"}, status=400)
await self._node_registry.register_nodes(nodes)
return web.json_response({"success": True, "message": f"{len(nodes)} nodes registered successfully"})
except Exception as exc: # pragma: no cover - defensive logging
logger.error("Failed to register nodes: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
async def get_registry(self, request: web.Request) -> web.Response:
try:
if self._standalone_mode:
logger.warning("Registry refresh not available in standalone mode")
return web.json_response(
{
"success": False,
"error": "Standalone Mode Active",
"message": "Cannot interact with ComfyUI in standalone mode.",
},
status=503,
)
try:
self._prompt_server.instance.send_sync("lora_registry_refresh", {})
logger.debug("Sent registry refresh request to frontend")
except Exception as exc:
logger.error("Failed to send registry refresh message: %s", exc)
return web.json_response(
{
"success": False,
"error": "Communication Error",
"message": f"Failed to communicate with ComfyUI frontend: {exc}",
},
status=500,
)
registry_updated = await self._node_registry.wait_for_update(timeout=1.0)
if not registry_updated:
logger.warning("Registry refresh timeout after 1 second")
return web.json_response(
{
"success": False,
"error": "Timeout Error",
"message": "Registry refresh timeout - ComfyUI frontend may not be responsive",
},
status=408,
)
registry_info = await self._node_registry.get_registry()
return web.json_response({"success": True, "data": registry_info})
except Exception as exc: # pragma: no cover - defensive logging
logger.error("Failed to get registry: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": "Internal Error", "message": str(exc)}, status=500)
class MiscHandlerSet:
"""Aggregate handlers into a lookup compatible with the registrar."""
def __init__(
self,
*,
health: HealthCheckHandler,
settings: SettingsHandler,
usage_stats: UsageStatsHandler,
lora_code: LoraCodeHandler,
trained_words: TrainedWordsHandler,
model_examples: ModelExampleFilesHandler,
node_registry: NodeRegistryHandler,
model_library: ModelLibraryHandler,
metadata_archive: MetadataArchiveHandler,
filesystem: FileSystemHandler,
) -> None:
self.health = health
self.settings = settings
self.usage_stats = usage_stats
self.lora_code = lora_code
self.trained_words = trained_words
self.model_examples = model_examples
self.node_registry = node_registry
self.model_library = model_library
self.metadata_archive = metadata_archive
self.filesystem = filesystem
def to_route_mapping(self) -> Mapping[str, Callable[[web.Request], Awaitable[web.StreamResponse]]]:
return {
"health_check": self.health.health_check,
"get_settings": self.settings.get_settings,
"update_settings": self.settings.update_settings,
"get_settings_libraries": self.settings.get_libraries,
"activate_library": self.settings.activate_library,
"update_usage_stats": self.usage_stats.update_usage_stats,
"get_usage_stats": self.usage_stats.get_usage_stats,
"update_lora_code": self.lora_code.update_lora_code,
"get_trained_words": self.trained_words.get_trained_words,
"get_model_example_files": self.model_examples.get_model_example_files,
"register_nodes": self.node_registry.register_nodes,
"get_registry": self.node_registry.get_registry,
"check_model_exists": self.model_library.check_model_exists,
"download_metadata_archive": self.metadata_archive.download_metadata_archive,
"remove_metadata_archive": self.metadata_archive.remove_metadata_archive,
"get_metadata_archive_status": self.metadata_archive.get_metadata_archive_status,
"get_model_versions_status": self.model_library.get_model_versions_status,
"open_file_location": self.filesystem.open_file_location,
}
def build_service_registry_adapter() -> ServiceRegistryAdapter:
return ServiceRegistryAdapter(
get_lora_scanner=ServiceRegistry.get_lora_scanner,
get_checkpoint_scanner=ServiceRegistry.get_checkpoint_scanner,
get_embedding_scanner=ServiceRegistry.get_embedding_scanner,
)
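
A composition sketch for MiscHandlerSet with default collaborators. PromptServer is assumed importable as in the other route modules, and the metadata provider factory shown is a stand-in for whatever the controller actually injects:

from server import PromptServer  # type: ignore

def build_misc_handlers(*, standalone_mode: bool) -> MiscHandlerSet:
    registry = NodeRegistry()
    return MiscHandlerSet(
        health=HealthCheckHandler(),
        settings=SettingsHandler(),
        usage_stats=UsageStatsHandler(),
        lora_code=LoraCodeHandler(PromptServer),
        trained_words=TrainedWordsHandler(),
        model_examples=ModelExampleFilesHandler(),
        node_registry=NodeRegistryHandler(registry, PromptServer, standalone_mode=standalone_mode),
        model_library=ModelLibraryHandler(
            build_service_registry_adapter(),
            metadata_provider_factory=lambda: get_default_metadata_provider(),  # stand-in factory
        ),
        metadata_archive=MetadataArchiveHandler(),
        filesystem=FileSystemHandler(),
    )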

File diff suppressed because it is too large


@@ -0,0 +1,56 @@
"""Handlers responsible for serving preview assets dynamically."""
from __future__ import annotations
import logging
import urllib.parse
from pathlib import Path
from aiohttp import web
from ...config import config as global_config
logger = logging.getLogger(__name__)
class PreviewHandler:
"""Serve preview assets for the active library at request time."""
def __init__(self, *, config=global_config) -> None:
self._config = config
async def serve_preview(self, request: web.Request) -> web.StreamResponse:
"""Return the preview file referenced by the encoded ``path`` query."""
raw_path = request.query.get("path", "")
if not raw_path:
raise web.HTTPBadRequest(text="Missing 'path' query parameter")
try:
decoded_path = urllib.parse.unquote(raw_path)
except Exception as exc: # pragma: no cover - defensive guard
logger.debug("Failed to decode preview path %s: %s", raw_path, exc)
raise web.HTTPBadRequest(text="Invalid preview path encoding") from exc
normalized = decoded_path.replace("\\", "/")
candidate = Path(normalized)
try:
resolved = candidate.expanduser().resolve(strict=False)
except Exception as exc:
logger.debug("Failed to resolve preview path %s: %s", normalized, exc)
raise web.HTTPBadRequest(text="Unable to resolve preview path") from exc
resolved_str = str(resolved)
if not self._config.is_preview_path_allowed(resolved_str):
logger.debug("Rejected preview outside allowed roots: %s", resolved_str)
raise web.HTTPForbidden(text="Preview path is not within an allowed directory")
if not resolved.is_file():
logger.debug("Preview file not found at %s", resolved_str)
raise web.HTTPNotFound(text="Preview file not found")
# aiohttp's FileResponse handles range requests and content headers for us.
return web.FileResponse(path=resolved, chunk_size=256 * 1024)
__all__ = ["PreviewHandler"]


@@ -0,0 +1,723 @@
"""Dedicated handler objects for recipe-related routes."""
from __future__ import annotations
import json
import logging
import os
from dataclasses import dataclass
from typing import Any, Awaitable, Callable, Dict, Mapping, Optional
from aiohttp import web
from ...config import config
from ...services.server_i18n import server_i18n as default_server_i18n
from ...services.settings_manager import SettingsManager
from ...services.recipes import (
RecipeAnalysisService,
RecipeDownloadError,
RecipeNotFoundError,
RecipePersistenceService,
RecipeSharingService,
RecipeValidationError,
)
Logger = logging.Logger
EnsureDependenciesCallable = Callable[[], Awaitable[None]]
RecipeScannerGetter = Callable[[], Any]
CivitaiClientGetter = Callable[[], Any]
@dataclass(frozen=True)
class RecipeHandlerSet:
"""Group of handlers providing recipe route implementations."""
page_view: "RecipePageView"
listing: "RecipeListingHandler"
query: "RecipeQueryHandler"
management: "RecipeManagementHandler"
analysis: "RecipeAnalysisHandler"
sharing: "RecipeSharingHandler"
def to_route_mapping(self) -> Mapping[str, Callable[[web.Request], Awaitable[web.StreamResponse]]]:
"""Expose handler coroutines keyed by registrar handler names."""
return {
"render_page": self.page_view.render_page,
"list_recipes": self.listing.list_recipes,
"get_recipe": self.listing.get_recipe,
"analyze_uploaded_image": self.analysis.analyze_uploaded_image,
"analyze_local_image": self.analysis.analyze_local_image,
"save_recipe": self.management.save_recipe,
"delete_recipe": self.management.delete_recipe,
"get_top_tags": self.query.get_top_tags,
"get_base_models": self.query.get_base_models,
"share_recipe": self.sharing.share_recipe,
"download_shared_recipe": self.sharing.download_shared_recipe,
"get_recipe_syntax": self.query.get_recipe_syntax,
"update_recipe": self.management.update_recipe,
"reconnect_lora": self.management.reconnect_lora,
"find_duplicates": self.query.find_duplicates,
"bulk_delete": self.management.bulk_delete,
"save_recipe_from_widget": self.management.save_recipe_from_widget,
"get_recipes_for_lora": self.query.get_recipes_for_lora,
"scan_recipes": self.query.scan_recipes,
}
class RecipePageView:
"""Render the recipe shell page."""
def __init__(
self,
*,
ensure_dependencies_ready: EnsureDependenciesCallable,
settings_service: SettingsManager,
server_i18n=default_server_i18n,
template_env,
template_name: str,
recipe_scanner_getter: RecipeScannerGetter,
logger: Logger,
) -> None:
self._ensure_dependencies_ready = ensure_dependencies_ready
self._settings = settings_service
self._server_i18n = server_i18n
self._template_env = template_env
self._template_name = template_name
self._recipe_scanner_getter = recipe_scanner_getter
self._logger = logger
async def render_page(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
recipe_scanner = self._recipe_scanner_getter()
if recipe_scanner is None: # pragma: no cover - defensive guard
raise RuntimeError("Recipe scanner not available")
user_language = self._settings.get("language", "en")
self._server_i18n.set_locale(user_language)
try:
await recipe_scanner.get_cached_data(force_refresh=False)
rendered = self._template_env.get_template(self._template_name).render(
recipes=[],
is_initializing=False,
settings=self._settings,
request=request,
t=self._server_i18n.get_translation,
)
except Exception as cache_error: # pragma: no cover - logging path
self._logger.error("Error loading recipe cache data: %s", cache_error)
rendered = self._template_env.get_template(self._template_name).render(
is_initializing=True,
settings=self._settings,
request=request,
t=self._server_i18n.get_translation,
)
return web.Response(text=rendered, content_type="text/html")
except Exception as exc: # pragma: no cover - logging path
self._logger.error("Error handling recipes request: %s", exc, exc_info=True)
return web.Response(text="Error loading recipes page", status=500)
class RecipeListingHandler:
"""Provide listing and detail APIs for recipes."""
def __init__(
self,
*,
ensure_dependencies_ready: EnsureDependenciesCallable,
recipe_scanner_getter: RecipeScannerGetter,
logger: Logger,
) -> None:
self._ensure_dependencies_ready = ensure_dependencies_ready
self._recipe_scanner_getter = recipe_scanner_getter
self._logger = logger
async def list_recipes(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
recipe_scanner = self._recipe_scanner_getter()
if recipe_scanner is None:
raise RuntimeError("Recipe scanner unavailable")
page = int(request.query.get("page", "1"))
page_size = int(request.query.get("page_size", "20"))
sort_by = request.query.get("sort_by", "date")
search = request.query.get("search")
search_options = {
"title": request.query.get("search_title", "true").lower() == "true",
"tags": request.query.get("search_tags", "true").lower() == "true",
"lora_name": request.query.get("search_lora_name", "true").lower() == "true",
"lora_model": request.query.get("search_lora_model", "true").lower() == "true",
}
filters: Dict[str, list[str]] = {}
base_models = request.query.get("base_models")
if base_models:
filters["base_model"] = base_models.split(",")
tags = request.query.get("tags")
if tags:
filters["tags"] = tags.split(",")
lora_hash = request.query.get("lora_hash")
result = await recipe_scanner.get_paginated_data(
page=page,
page_size=page_size,
sort_by=sort_by,
search=search,
filters=filters,
search_options=search_options,
lora_hash=lora_hash,
)
for item in result.get("items", []):
file_path = item.get("file_path")
if file_path:
item["file_url"] = self.format_recipe_file_url(file_path)
else:
item.setdefault("file_url", "/loras_static/images/no-preview.png")
item.setdefault("loras", [])
item.setdefault("base_model", "")
return web.json_response(result)
except Exception as exc:
self._logger.error("Error retrieving recipes: %s", exc, exc_info=True)
return web.json_response({"error": str(exc)}, status=500)
async def get_recipe(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
recipe_scanner = self._recipe_scanner_getter()
if recipe_scanner is None:
raise RuntimeError("Recipe scanner unavailable")
recipe_id = request.match_info["recipe_id"]
recipe = await recipe_scanner.get_recipe_by_id(recipe_id)
if not recipe:
return web.json_response({"error": "Recipe not found"}, status=404)
return web.json_response(recipe)
except Exception as exc:
self._logger.error("Error retrieving recipe details: %s", exc, exc_info=True)
return web.json_response({"error": str(exc)}, status=500)
def format_recipe_file_url(self, file_path: str) -> str:
try:
normalized_path = os.path.normpath(file_path)
static_url = config.get_preview_static_url(normalized_path)
if static_url:
return static_url
except Exception as exc: # pragma: no cover - logging path
self._logger.error("Error formatting recipe file URL: %s", exc, exc_info=True)
return "/loras_static/images/no-preview.png"
return "/loras_static/images/no-preview.png"
class RecipeQueryHandler:
"""Provide read-only insights on recipe data."""
def __init__(
self,
*,
ensure_dependencies_ready: EnsureDependenciesCallable,
recipe_scanner_getter: RecipeScannerGetter,
format_recipe_file_url: Callable[[str], str],
logger: Logger,
) -> None:
self._ensure_dependencies_ready = ensure_dependencies_ready
self._recipe_scanner_getter = recipe_scanner_getter
self._format_recipe_file_url = format_recipe_file_url
self._logger = logger
async def get_top_tags(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
recipe_scanner = self._recipe_scanner_getter()
if recipe_scanner is None:
raise RuntimeError("Recipe scanner unavailable")
limit = int(request.query.get("limit", "20"))
cache = await recipe_scanner.get_cached_data()
tag_counts: Dict[str, int] = {}
for recipe in getattr(cache, "raw_data", []):
for tag in recipe.get("tags", []) or []:
tag_counts[tag] = tag_counts.get(tag, 0) + 1
sorted_tags = [{"tag": tag, "count": count} for tag, count in tag_counts.items()]
sorted_tags.sort(key=lambda entry: entry["count"], reverse=True)
return web.json_response({"success": True, "tags": sorted_tags[:limit]})
except Exception as exc:
self._logger.error("Error retrieving top tags: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
async def get_base_models(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
recipe_scanner = self._recipe_scanner_getter()
if recipe_scanner is None:
raise RuntimeError("Recipe scanner unavailable")
cache = await recipe_scanner.get_cached_data()
base_model_counts: Dict[str, int] = {}
for recipe in getattr(cache, "raw_data", []):
base_model = recipe.get("base_model")
if base_model:
base_model_counts[base_model] = base_model_counts.get(base_model, 0) + 1
sorted_models = [{"name": model, "count": count} for model, count in base_model_counts.items()]
sorted_models.sort(key=lambda entry: entry["count"], reverse=True)
return web.json_response({"success": True, "base_models": sorted_models})
except Exception as exc:
self._logger.error("Error retrieving base models: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
async def get_recipes_for_lora(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
recipe_scanner = self._recipe_scanner_getter()
if recipe_scanner is None:
raise RuntimeError("Recipe scanner unavailable")
lora_hash = request.query.get("hash")
if not lora_hash:
return web.json_response({"success": False, "error": "Lora hash is required"}, status=400)
matching_recipes = await recipe_scanner.get_recipes_for_lora(lora_hash)
return web.json_response({"success": True, "recipes": matching_recipes})
except Exception as exc:
self._logger.error("Error getting recipes for Lora: %s", exc)
return web.json_response({"success": False, "error": str(exc)}, status=500)
async def scan_recipes(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
recipe_scanner = self._recipe_scanner_getter()
if recipe_scanner is None:
raise RuntimeError("Recipe scanner unavailable")
self._logger.info("Manually triggering recipe cache rebuild")
await recipe_scanner.get_cached_data(force_refresh=True)
return web.json_response({"success": True, "message": "Recipe cache refreshed successfully"})
except Exception as exc:
self._logger.error("Error refreshing recipe cache: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
async def find_duplicates(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
recipe_scanner = self._recipe_scanner_getter()
if recipe_scanner is None:
raise RuntimeError("Recipe scanner unavailable")
duplicate_groups = await recipe_scanner.find_all_duplicate_recipes()
response_data = []
for fingerprint, recipe_ids in duplicate_groups.items():
if len(recipe_ids) <= 1:
continue
recipes = []
for recipe_id in recipe_ids:
recipe = await recipe_scanner.get_recipe_by_id(recipe_id)
if recipe:
recipes.append(
{
"id": recipe.get("id"),
"title": recipe.get("title"),
"file_url": recipe.get("file_url")
or self._format_recipe_file_url(recipe.get("file_path", "")),
"modified": recipe.get("modified"),
"created_date": recipe.get("created_date"),
"lora_count": len(recipe.get("loras", [])),
}
)
if len(recipes) >= 2:
recipes.sort(key=lambda entry: entry.get("modified", 0), reverse=True)
response_data.append(
{
"fingerprint": fingerprint,
"count": len(recipes),
"recipes": recipes,
}
)
response_data.sort(key=lambda entry: entry["count"], reverse=True)
return web.json_response({"success": True, "duplicate_groups": response_data})
except Exception as exc:
self._logger.error("Error finding duplicate recipes: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
async def get_recipe_syntax(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
recipe_scanner = self._recipe_scanner_getter()
if recipe_scanner is None:
raise RuntimeError("Recipe scanner unavailable")
recipe_id = request.match_info["recipe_id"]
try:
syntax_parts = await recipe_scanner.get_recipe_syntax_tokens(recipe_id)
except RecipeNotFoundError:
return web.json_response({"error": "Recipe not found"}, status=404)
if not syntax_parts:
return web.json_response({"error": "No LoRAs found in this recipe"}, status=400)
return web.json_response({"success": True, "syntax": " ".join(syntax_parts)})
except Exception as exc:
self._logger.error("Error generating recipe syntax: %s", exc, exc_info=True)
return web.json_response({"error": str(exc)}, status=500)
class RecipeManagementHandler:
"""Handle create/update/delete style recipe operations."""
def __init__(
self,
*,
ensure_dependencies_ready: EnsureDependenciesCallable,
recipe_scanner_getter: RecipeScannerGetter,
logger: Logger,
persistence_service: RecipePersistenceService,
analysis_service: RecipeAnalysisService,
) -> None:
self._ensure_dependencies_ready = ensure_dependencies_ready
self._recipe_scanner_getter = recipe_scanner_getter
self._logger = logger
self._persistence_service = persistence_service
self._analysis_service = analysis_service
async def save_recipe(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
recipe_scanner = self._recipe_scanner_getter()
if recipe_scanner is None:
raise RuntimeError("Recipe scanner unavailable")
reader = await request.multipart()
payload = await self._parse_save_payload(reader)
result = await self._persistence_service.save_recipe(
recipe_scanner=recipe_scanner,
image_bytes=payload["image_bytes"],
image_base64=payload["image_base64"],
name=payload["name"],
tags=payload["tags"],
metadata=payload["metadata"],
)
return web.json_response(result.payload, status=result.status)
except RecipeValidationError as exc:
return web.json_response({"error": str(exc)}, status=400)
except Exception as exc:
self._logger.error("Error saving recipe: %s", exc, exc_info=True)
return web.json_response({"error": str(exc)}, status=500)
async def delete_recipe(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
recipe_scanner = self._recipe_scanner_getter()
if recipe_scanner is None:
raise RuntimeError("Recipe scanner unavailable")
recipe_id = request.match_info["recipe_id"]
result = await self._persistence_service.delete_recipe(
recipe_scanner=recipe_scanner, recipe_id=recipe_id
)
return web.json_response(result.payload, status=result.status)
except RecipeNotFoundError as exc:
return web.json_response({"error": str(exc)}, status=404)
except Exception as exc:
self._logger.error("Error deleting recipe: %s", exc, exc_info=True)
return web.json_response({"error": str(exc)}, status=500)
async def update_recipe(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
recipe_scanner = self._recipe_scanner_getter()
if recipe_scanner is None:
raise RuntimeError("Recipe scanner unavailable")
recipe_id = request.match_info["recipe_id"]
data = await request.json()
result = await self._persistence_service.update_recipe(
recipe_scanner=recipe_scanner, recipe_id=recipe_id, updates=data
)
return web.json_response(result.payload, status=result.status)
except RecipeValidationError as exc:
return web.json_response({"error": str(exc)}, status=400)
except RecipeNotFoundError as exc:
return web.json_response({"error": str(exc)}, status=404)
except Exception as exc:
self._logger.error("Error updating recipe: %s", exc, exc_info=True)
return web.json_response({"error": str(exc)}, status=500)
async def reconnect_lora(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
recipe_scanner = self._recipe_scanner_getter()
if recipe_scanner is None:
raise RuntimeError("Recipe scanner unavailable")
data = await request.json()
for field in ("recipe_id", "lora_index", "target_name"):
if field not in data:
raise RecipeValidationError(f"Missing required field: {field}")
result = await self._persistence_service.reconnect_lora(
recipe_scanner=recipe_scanner,
recipe_id=data["recipe_id"],
lora_index=int(data["lora_index"]),
target_name=data["target_name"],
)
return web.json_response(result.payload, status=result.status)
except RecipeValidationError as exc:
return web.json_response({"error": str(exc)}, status=400)
except RecipeNotFoundError as exc:
return web.json_response({"error": str(exc)}, status=404)
except Exception as exc:
self._logger.error("Error reconnecting LoRA: %s", exc, exc_info=True)
return web.json_response({"error": str(exc)}, status=500)
async def bulk_delete(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
recipe_scanner = self._recipe_scanner_getter()
if recipe_scanner is None:
raise RuntimeError("Recipe scanner unavailable")
data = await request.json()
recipe_ids = data.get("recipe_ids", [])
result = await self._persistence_service.bulk_delete(
recipe_scanner=recipe_scanner, recipe_ids=recipe_ids
)
return web.json_response(result.payload, status=result.status)
except RecipeValidationError as exc:
return web.json_response({"success": False, "error": str(exc)}, status=400)
except RecipeNotFoundError as exc:
return web.json_response({"success": False, "error": str(exc)}, status=404)
except Exception as exc:
self._logger.error("Error performing bulk delete: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
async def save_recipe_from_widget(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
recipe_scanner = self._recipe_scanner_getter()
if recipe_scanner is None:
raise RuntimeError("Recipe scanner unavailable")
analysis = await self._analysis_service.analyze_widget_metadata(
recipe_scanner=recipe_scanner
)
metadata = analysis.payload.get("metadata")
image_bytes = analysis.payload.get("image_bytes")
if not metadata or image_bytes is None:
raise RecipeValidationError("Unable to extract metadata from widget")
result = await self._persistence_service.save_recipe_from_widget(
recipe_scanner=recipe_scanner,
metadata=metadata,
image_bytes=image_bytes,
)
return web.json_response(result.payload, status=result.status)
except RecipeValidationError as exc:
return web.json_response({"error": str(exc)}, status=400)
except Exception as exc:
self._logger.error("Error saving recipe from widget: %s", exc, exc_info=True)
return web.json_response({"error": str(exc)}, status=500)
async def _parse_save_payload(self, reader) -> dict[str, Any]:
image_bytes: Optional[bytes] = None
image_base64: Optional[str] = None
name: Optional[str] = None
tags: list[str] = []
metadata: Optional[Dict[str, Any]] = None
while True:
field = await reader.next()
if field is None:
break
if field.name == "image":
image_chunks = bytearray()
while True:
chunk = await field.read_chunk()
if not chunk:
break
image_chunks.extend(chunk)
image_bytes = bytes(image_chunks)
elif field.name == "image_base64":
image_base64 = await field.text()
elif field.name == "name":
name = await field.text()
elif field.name == "tags":
tags_text = await field.text()
try:
parsed_tags = json.loads(tags_text)
tags = parsed_tags if isinstance(parsed_tags, list) else []
except Exception:
tags = []
elif field.name == "metadata":
metadata_text = await field.text()
try:
metadata = json.loads(metadata_text)
except Exception:
metadata = {}
return {
"image_bytes": image_bytes,
"image_base64": image_base64,
"name": name,
"tags": tags,
"metadata": metadata,
}
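
A client-side sketch of the multipart body _parse_save_payload expects. Field names come from the parser above; the endpoint URL is a placeholder:

import json
import aiohttp

async def save_recipe(session: aiohttp.ClientSession) -> dict:
    form = aiohttp.FormData()
    form.add_field("image", open("recipe.png", "rb"), filename="recipe.png")
    form.add_field("name", "My recipe")
    form.add_field("tags", json.dumps(["portrait", "sdxl"]))
    form.add_field("metadata", json.dumps({"loras": []}))
    async with session.post("http://127.0.0.1:8188/api/lm/recipes/save", data=form) as resp:  # placeholder path
        return await resp.json()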
class RecipeAnalysisHandler:
"""Analyze images to extract recipe metadata."""
def __init__(
self,
*,
ensure_dependencies_ready: EnsureDependenciesCallable,
recipe_scanner_getter: RecipeScannerGetter,
civitai_client_getter: CivitaiClientGetter,
logger: Logger,
analysis_service: RecipeAnalysisService,
) -> None:
self._ensure_dependencies_ready = ensure_dependencies_ready
self._recipe_scanner_getter = recipe_scanner_getter
self._civitai_client_getter = civitai_client_getter
self._logger = logger
self._analysis_service = analysis_service
async def analyze_uploaded_image(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
recipe_scanner = self._recipe_scanner_getter()
civitai_client = self._civitai_client_getter()
if recipe_scanner is None or civitai_client is None:
raise RuntimeError("Required services unavailable")
content_type = request.headers.get("Content-Type", "")
if "multipart/form-data" in content_type:
reader = await request.multipart()
field = await reader.next()
if field is None or field.name != "image":
raise RecipeValidationError("No image field found")
image_chunks = bytearray()
while True:
chunk = await field.read_chunk()
if not chunk:
break
image_chunks.extend(chunk)
result = await self._analysis_service.analyze_uploaded_image(
image_bytes=bytes(image_chunks),
recipe_scanner=recipe_scanner,
)
return web.json_response(result.payload, status=result.status)
if "application/json" in content_type:
data = await request.json()
result = await self._analysis_service.analyze_remote_image(
url=data.get("url"),
recipe_scanner=recipe_scanner,
civitai_client=civitai_client,
)
return web.json_response(result.payload, status=result.status)
raise RecipeValidationError("Unsupported content type")
except RecipeValidationError as exc:
return web.json_response({"error": str(exc), "loras": []}, status=400)
except RecipeDownloadError as exc:
return web.json_response({"error": str(exc), "loras": []}, status=400)
except RecipeNotFoundError as exc:
return web.json_response({"error": str(exc), "loras": []}, status=404)
except Exception as exc:
self._logger.error("Error analyzing recipe image: %s", exc, exc_info=True)
return web.json_response({"error": str(exc), "loras": []}, status=500)
async def analyze_local_image(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
recipe_scanner = self._recipe_scanner_getter()
if recipe_scanner is None:
raise RuntimeError("Recipe scanner unavailable")
data = await request.json()
result = await self._analysis_service.analyze_local_image(
file_path=data.get("path"),
recipe_scanner=recipe_scanner,
)
return web.json_response(result.payload, status=result.status)
except RecipeValidationError as exc:
return web.json_response({"error": str(exc), "loras": []}, status=400)
except RecipeNotFoundError as exc:
return web.json_response({"error": str(exc), "loras": []}, status=404)
except Exception as exc:
self._logger.error("Error analyzing local image: %s", exc, exc_info=True)
return web.json_response({"error": str(exc), "loras": []}, status=500)
class RecipeSharingHandler:
"""Serve endpoints related to recipe sharing."""
def __init__(
self,
*,
ensure_dependencies_ready: EnsureDependenciesCallable,
recipe_scanner_getter: RecipeScannerGetter,
logger: Logger,
sharing_service: RecipeSharingService,
) -> None:
self._ensure_dependencies_ready = ensure_dependencies_ready
self._recipe_scanner_getter = recipe_scanner_getter
self._logger = logger
self._sharing_service = sharing_service
async def share_recipe(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
recipe_scanner = self._recipe_scanner_getter()
if recipe_scanner is None:
raise RuntimeError("Recipe scanner unavailable")
recipe_id = request.match_info["recipe_id"]
result = await self._sharing_service.share_recipe(
recipe_scanner=recipe_scanner, recipe_id=recipe_id
)
return web.json_response(result.payload, status=result.status)
except RecipeNotFoundError as exc:
return web.json_response({"error": str(exc)}, status=404)
except Exception as exc:
self._logger.error("Error sharing recipe: %s", exc, exc_info=True)
return web.json_response({"error": str(exc)}, status=500)
async def download_shared_recipe(self, request: web.Request) -> web.StreamResponse:
try:
await self._ensure_dependencies_ready()
recipe_scanner = self._recipe_scanner_getter()
if recipe_scanner is None:
raise RuntimeError("Recipe scanner unavailable")
recipe_id = request.match_info["recipe_id"]
download_info = await self._sharing_service.prepare_download(
recipe_scanner=recipe_scanner, recipe_id=recipe_id
)
return web.FileResponse(
download_info.file_path,
headers={
"Content-Disposition": f'attachment; filename="{download_info.download_filename}"'
},
)
except RecipeNotFoundError as exc:
return web.json_response({"error": str(exc)}, status=404)
except Exception as exc:
self._logger.error("Error downloading shared recipe: %s", exc, exc_info=True)
return web.json_response({"error": str(exc)}, status=500)
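For orientation, a minimal wiring sketch showing how these handlers could be composed before route registration; the getter callables, service instance, and logger below are assumptions based on the constructor signatures above:

    sharing_handler = RecipeSharingHandler(
        ensure_dependencies_ready=ensure_ready,        # hypothetical async callable
        recipe_scanner_getter=lambda: recipe_scanner,  # may return None until startup finishes
        logger=logging.getLogger(__name__),
        sharing_service=sharing_service,               # hypothetical RecipeSharingService
    )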

View File

@@ -5,9 +5,9 @@ from typing import Dict
from server import PromptServer # type: ignore
from .base_model_routes import BaseModelRoutes
from .model_route_registrar import ModelRouteRegistrar
from ..services.lora_service import LoraService
from ..services.service_registry import ServiceRegistry
from ..services.metadata_service import get_default_metadata_provider
from ..utils.utils import get_lora_info
logger = logging.getLogger(__name__)
@@ -17,8 +17,7 @@ class LoraRoutes(BaseModelRoutes):
def __init__(self):
"""Initialize LoRA routes with LoRA service"""
# Service will be initialized later via setup_routes
self.service = None
super().__init__()
self.template_name = "loras.html"
async def initialize_services(self):
@@ -26,26 +25,26 @@ class LoraRoutes(BaseModelRoutes):
lora_scanner = await ServiceRegistry.get_lora_scanner()
self.service = LoraService(lora_scanner)
# Initialize parent with the service
super().__init__(self.service)
# Attach service dependencies
self.attach_service(self.service)
def setup_routes(self, app: web.Application):
"""Setup LoRA routes"""
# Schedule service initialization on app startup
app.on_startup.append(lambda _: self.initialize_services())
# Setup common routes with 'loras' prefix (includes page route)
super().setup_routes(app, 'loras')
def setup_specific_routes(self, app: web.Application, prefix: str):
def setup_specific_routes(self, registrar: ModelRouteRegistrar, prefix: str):
"""Setup LoRA-specific routes"""
# LoRA-specific query routes
app.router.add_get(f'/api/lm/{prefix}/letter-counts', self.get_letter_counts)
app.router.add_get(f'/api/lm/{prefix}/get-trigger-words', self.get_lora_trigger_words)
app.router.add_get(f'/api/lm/{prefix}/usage-tips-by-path', self.get_lora_usage_tips_by_path)
registrar.add_prefixed_route('GET', '/api/lm/{prefix}/letter-counts', prefix, self.get_letter_counts)
registrar.add_prefixed_route('GET', '/api/lm/{prefix}/get-trigger-words', prefix, self.get_lora_trigger_words)
registrar.add_prefixed_route('GET', '/api/lm/{prefix}/usage-tips-by-path', prefix, self.get_lora_usage_tips_by_path)
# ComfyUI integration
app.router.add_post(f'/api/lm/{prefix}/get_trigger_words', self.get_trigger_words)
registrar.add_prefixed_route('POST', '/api/lm/{prefix}/get_trigger_words', prefix, self.get_trigger_words)
def _parse_specific_params(self, request: web.Request) -> Dict:
"""Parse LoRA-specific parameters"""

View File

@@ -0,0 +1,69 @@
"""Route registrar for miscellaneous endpoints.
This module mirrors the model route registrar architecture so that
miscellaneous endpoints share a consistent registration flow.
"""
from dataclasses import dataclass
from typing import Callable, Iterable, Mapping
from aiohttp import web
@dataclass(frozen=True)
class RouteDefinition:
"""Declarative definition for a HTTP route."""
method: str
path: str
handler_name: str
MISC_ROUTE_DEFINITIONS: tuple[RouteDefinition, ...] = (
RouteDefinition("GET", "/api/lm/settings", "get_settings"),
RouteDefinition("POST", "/api/lm/settings", "update_settings"),
RouteDefinition("GET", "/api/lm/settings/libraries", "get_settings_libraries"),
RouteDefinition("POST", "/api/lm/settings/libraries/activate", "activate_library"),
RouteDefinition("GET", "/api/lm/health-check", "health_check"),
RouteDefinition("POST", "/api/lm/open-file-location", "open_file_location"),
RouteDefinition("POST", "/api/lm/update-usage-stats", "update_usage_stats"),
RouteDefinition("GET", "/api/lm/get-usage-stats", "get_usage_stats"),
RouteDefinition("POST", "/api/lm/update-lora-code", "update_lora_code"),
RouteDefinition("GET", "/api/lm/trained-words", "get_trained_words"),
RouteDefinition("GET", "/api/lm/model-example-files", "get_model_example_files"),
RouteDefinition("POST", "/api/lm/register-nodes", "register_nodes"),
RouteDefinition("GET", "/api/lm/get-registry", "get_registry"),
RouteDefinition("GET", "/api/lm/check-model-exists", "check_model_exists"),
RouteDefinition("POST", "/api/lm/download-metadata-archive", "download_metadata_archive"),
RouteDefinition("POST", "/api/lm/remove-metadata-archive", "remove_metadata_archive"),
RouteDefinition("GET", "/api/lm/metadata-archive-status", "get_metadata_archive_status"),
RouteDefinition("GET", "/api/lm/model-versions-status", "get_model_versions_status"),
)
class MiscRouteRegistrar:
"""Bind miscellaneous route definitions to an aiohttp router."""
_METHOD_MAP = {
"GET": "add_get",
"POST": "add_post",
"PUT": "add_put",
"DELETE": "add_delete",
}
def __init__(self, app: web.Application) -> None:
self._app = app
def register_routes(
self,
handler_lookup: Mapping[str, Callable[[web.Request], object]],
*,
definitions: Iterable[RouteDefinition] = MISC_ROUTE_DEFINITIONS,
) -> None:
for definition in definitions:
self._bind(definition.method, definition.path, handler_lookup[definition.handler_name])
def _bind(self, method: str, path: str, handler: Callable) -> None:
add_method_name = self._METHOD_MAP[method.upper()]
add_method = getattr(self._app.router, add_method_name)
add_method(path, handler)
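A minimal usage sketch; the misc_handlers object is hypothetical, but the mapping keys must match the handler_name fields, since register_routes indexes the lookup directly. The definitions override shown here limits registration to a subset:

    registrar = MiscRouteRegistrar(app)
    registrar.register_routes(
        {"get_settings": misc_handlers.get_settings,
         "update_settings": misc_handlers.update_settings},
        definitions=[d for d in MISC_ROUTE_DEFINITIONS if d.path == "/api/lm/settings"],
    )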

File diff suppressed because it is too large

View File

@@ -0,0 +1,99 @@
"""Route registrar for model endpoints."""
from __future__ import annotations
from dataclasses import dataclass
from typing import Callable, Iterable, Mapping
from aiohttp import web
@dataclass(frozen=True)
class RouteDefinition:
"""Declarative definition for a HTTP route."""
method: str
path_template: str
handler_name: str
def build_path(self, prefix: str) -> str:
return self.path_template.replace("{prefix}", prefix)
COMMON_ROUTE_DEFINITIONS: tuple[RouteDefinition, ...] = (
RouteDefinition("GET", "/api/lm/{prefix}/list", "get_models"),
RouteDefinition("POST", "/api/lm/{prefix}/delete", "delete_model"),
RouteDefinition("POST", "/api/lm/{prefix}/exclude", "exclude_model"),
RouteDefinition("POST", "/api/lm/{prefix}/fetch-civitai", "fetch_civitai"),
RouteDefinition("POST", "/api/lm/{prefix}/fetch-all-civitai", "fetch_all_civitai"),
RouteDefinition("POST", "/api/lm/{prefix}/relink-civitai", "relink_civitai"),
RouteDefinition("POST", "/api/lm/{prefix}/replace-preview", "replace_preview"),
RouteDefinition("POST", "/api/lm/{prefix}/save-metadata", "save_metadata"),
RouteDefinition("POST", "/api/lm/{prefix}/add-tags", "add_tags"),
RouteDefinition("POST", "/api/lm/{prefix}/rename", "rename_model"),
RouteDefinition("POST", "/api/lm/{prefix}/bulk-delete", "bulk_delete_models"),
RouteDefinition("POST", "/api/lm/{prefix}/verify-duplicates", "verify_duplicates"),
RouteDefinition("POST", "/api/lm/{prefix}/move_model", "move_model"),
RouteDefinition("POST", "/api/lm/{prefix}/move_models_bulk", "move_models_bulk"),
RouteDefinition("GET", "/api/lm/{prefix}/auto-organize", "auto_organize_models"),
RouteDefinition("POST", "/api/lm/{prefix}/auto-organize", "auto_organize_models"),
RouteDefinition("GET", "/api/lm/{prefix}/auto-organize-progress", "get_auto_organize_progress"),
RouteDefinition("GET", "/api/lm/{prefix}/top-tags", "get_top_tags"),
RouteDefinition("GET", "/api/lm/{prefix}/base-models", "get_base_models"),
RouteDefinition("GET", "/api/lm/{prefix}/scan", "scan_models"),
RouteDefinition("GET", "/api/lm/{prefix}/roots", "get_model_roots"),
RouteDefinition("GET", "/api/lm/{prefix}/folders", "get_folders"),
RouteDefinition("GET", "/api/lm/{prefix}/folder-tree", "get_folder_tree"),
RouteDefinition("GET", "/api/lm/{prefix}/unified-folder-tree", "get_unified_folder_tree"),
RouteDefinition("GET", "/api/lm/{prefix}/find-duplicates", "find_duplicate_models"),
RouteDefinition("GET", "/api/lm/{prefix}/find-filename-conflicts", "find_filename_conflicts"),
RouteDefinition("GET", "/api/lm/{prefix}/get-notes", "get_model_notes"),
RouteDefinition("GET", "/api/lm/{prefix}/preview-url", "get_model_preview_url"),
RouteDefinition("GET", "/api/lm/{prefix}/civitai-url", "get_model_civitai_url"),
RouteDefinition("GET", "/api/lm/{prefix}/metadata", "get_model_metadata"),
RouteDefinition("GET", "/api/lm/{prefix}/model-description", "get_model_description"),
RouteDefinition("GET", "/api/lm/{prefix}/relative-paths", "get_relative_paths"),
RouteDefinition("GET", "/api/lm/{prefix}/civitai/versions/{model_id}", "get_civitai_versions"),
RouteDefinition("GET", "/api/lm/{prefix}/civitai/model/version/{modelVersionId}", "get_civitai_model_by_version"),
RouteDefinition("GET", "/api/lm/{prefix}/civitai/model/hash/{hash}", "get_civitai_model_by_hash"),
RouteDefinition("POST", "/api/lm/download-model", "download_model"),
RouteDefinition("GET", "/api/lm/download-model-get", "download_model_get"),
RouteDefinition("GET", "/api/lm/cancel-download-get", "cancel_download_get"),
RouteDefinition("GET", "/api/lm/download-progress/{download_id}", "get_download_progress"),
RouteDefinition("GET", "/{prefix}", "handle_models_page"),
)
class ModelRouteRegistrar:
"""Bind declarative definitions to an aiohttp router."""
_METHOD_MAP = {
"GET": "add_get",
"POST": "add_post",
"PUT": "add_put",
"DELETE": "add_delete",
}
def __init__(self, app: web.Application) -> None:
self._app = app
def register_common_routes(
self,
prefix: str,
handler_lookup: Mapping[str, Callable[[web.Request], object]],
*,
definitions: Iterable[RouteDefinition] = COMMON_ROUTE_DEFINITIONS,
) -> None:
for definition in definitions:
self._bind_route(definition.method, definition.build_path(prefix), handler_lookup[definition.handler_name])
def add_route(self, method: str, path: str, handler: Callable) -> None:
self._bind_route(method, path, handler)
def add_prefixed_route(self, method: str, path_template: str, prefix: str, handler: Callable) -> None:
self._bind_route(method, path_template.replace("{prefix}", prefix), handler)
def _bind_route(self, method: str, path: str, handler: Callable) -> None:
add_method_name = self._METHOD_MAP[method.upper()]
add_method = getattr(self._app.router, add_method_name)
add_method(path, handler)
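For illustration, build_path performs a plain string substitution, so registering the common routes under the 'loras' prefix binds paths such as '/api/lm/loras/list'. A sketch, assuming a routes object whose method names match the handler_name fields:

    registrar = ModelRouteRegistrar(app)
    registrar.register_common_routes(
        "loras",
        {definition.handler_name: getattr(routes, definition.handler_name)
         for definition in COMMON_ROUTE_DEFINITIONS},
    )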

View File

@@ -0,0 +1,25 @@
"""Route controller for preview asset delivery."""
from __future__ import annotations
from aiohttp import web
from .handlers.preview_handlers import PreviewHandler
class PreviewRoutes:
"""Register routes that expose preview assets."""
def __init__(self, *, handler: PreviewHandler | None = None) -> None:
self._handler = handler or PreviewHandler()
@classmethod
def setup_routes(cls, app: web.Application) -> None:
controller = cls()
controller.register(app)
def register(self, app: web.Application) -> None:
app.router.add_get('/api/lm/previews', self._handler.serve_preview)
__all__ = ["PreviewRoutes"]

View File

@@ -0,0 +1,64 @@
"""Route registrar for recipe endpoints."""
from __future__ import annotations
from dataclasses import dataclass
from typing import Callable, Mapping
from aiohttp import web
@dataclass(frozen=True)
class RouteDefinition:
"""Declarative definition for a recipe HTTP route."""
method: str
path: str
handler_name: str
ROUTE_DEFINITIONS: tuple[RouteDefinition, ...] = (
RouteDefinition("GET", "/loras/recipes", "render_page"),
RouteDefinition("GET", "/api/lm/recipes", "list_recipes"),
RouteDefinition("GET", "/api/lm/recipe/{recipe_id}", "get_recipe"),
RouteDefinition("POST", "/api/lm/recipes/analyze-image", "analyze_uploaded_image"),
RouteDefinition("POST", "/api/lm/recipes/analyze-local-image", "analyze_local_image"),
RouteDefinition("POST", "/api/lm/recipes/save", "save_recipe"),
RouteDefinition("DELETE", "/api/lm/recipe/{recipe_id}", "delete_recipe"),
RouteDefinition("GET", "/api/lm/recipes/top-tags", "get_top_tags"),
RouteDefinition("GET", "/api/lm/recipes/base-models", "get_base_models"),
RouteDefinition("GET", "/api/lm/recipe/{recipe_id}/share", "share_recipe"),
RouteDefinition("GET", "/api/lm/recipe/{recipe_id}/share/download", "download_shared_recipe"),
RouteDefinition("GET", "/api/lm/recipe/{recipe_id}/syntax", "get_recipe_syntax"),
RouteDefinition("PUT", "/api/lm/recipe/{recipe_id}/update", "update_recipe"),
RouteDefinition("POST", "/api/lm/recipe/lora/reconnect", "reconnect_lora"),
RouteDefinition("GET", "/api/lm/recipes/find-duplicates", "find_duplicates"),
RouteDefinition("POST", "/api/lm/recipes/bulk-delete", "bulk_delete"),
RouteDefinition("POST", "/api/lm/recipes/save-from-widget", "save_recipe_from_widget"),
RouteDefinition("GET", "/api/lm/recipes/for-lora", "get_recipes_for_lora"),
RouteDefinition("GET", "/api/lm/recipes/scan", "scan_recipes"),
)
class RecipeRouteRegistrar:
"""Bind declarative recipe definitions to an aiohttp router."""
_METHOD_MAP = {
"GET": "add_get",
"POST": "add_post",
"PUT": "add_put",
"DELETE": "add_delete",
}
def __init__(self, app: web.Application) -> None:
self._app = app
def register_routes(self, handler_lookup: Mapping[str, Callable[[web.Request], object]]) -> None:
for definition in ROUTE_DEFINITIONS:
handler = handler_lookup[definition.handler_name]
self._bind_route(definition.method, definition.path, handler)
def _bind_route(self, method: str, path: str, handler: Callable) -> None:
add_method_name = self._METHOD_MAP[method.upper()]
add_method = getattr(self._app.router, add_method_name)
add_method(path, handler)
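A binding sketch that reuses the handler classes shown earlier in this changeset; entries not spelled out here are elided, but the mapping must cover every handler_name in ROUTE_DEFINITIONS or register_routes raises KeyError:

    registrar = RecipeRouteRegistrar(app)
    registrar.register_routes({
        "analyze_uploaded_image": analysis_handler.analyze_uploaded_image,
        "analyze_local_image": analysis_handler.analyze_local_image,
        "share_recipe": sharing_handler.share_recipe,
        "download_shared_recipe": sharing_handler.download_shared_recipe,
        # ...remaining handler_name -> coroutine entries
    })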

File diff suppressed because it is too large

View File

@@ -5,12 +5,18 @@ import git
import zipfile
import shutil
import tempfile
from aiohttp import web
import asyncio
from aiohttp import web, ClientError
from typing import Dict, List
from ..utils.settings_paths import ensure_settings_file
from ..services.downloader import get_downloader
logger = logging.getLogger(__name__)
NETWORK_EXCEPTIONS = (ClientError, OSError, asyncio.TimeoutError)
class UpdateRoutes:
"""Routes for handling plugin update checks"""
@@ -63,6 +69,12 @@ class UpdateRoutes:
'nightly': nightly
})
except NETWORK_EXCEPTIONS as e:
logger.warning("Network unavailable during update check: %s", e)
return web.json_response({
'success': False,
'error': 'Network unavailable for update check'
})
except Exception as e:
logger.error(f"Failed to check for updates: {e}", exc_info=True)
return web.json_response({
@@ -111,7 +123,7 @@ class UpdateRoutes:
current_dir = os.path.dirname(os.path.abspath(__file__))
plugin_root = os.path.dirname(os.path.dirname(current_dir))
settings_path = os.path.join(plugin_root, 'settings.json')
settings_path = ensure_settings_file(logger)
settings_backup = None
if os.path.exists(settings_path):
with open(settings_path, 'r', encoding='utf-8') as f:
@@ -283,6 +295,9 @@ class UpdateRoutes:
return version, changelog
except NETWORK_EXCEPTIONS as e:
logger.warning("Unable to reach GitHub for nightly version: %s", e)
return "main", []
except Exception as e:
logger.error(f"Error fetching nightly version: {e}", exc_info=True)
return "main", []
@@ -448,6 +463,9 @@ class UpdateRoutes:
return version, changelog
except NETWORK_EXCEPTIONS as e:
logger.warning("Unable to reach GitHub for release info: %s", e)
return "v0.0.0", []
except Exception as e:
logger.error(f"Error fetching remote version: {e}", exc_info=True)
return "v0.0.0", []

View File

@@ -4,99 +4,89 @@ import logging
import os
from ..utils.models import BaseModelMetadata
from ..utils.routes_common import ModelRouteUtils
from ..utils.constants import NSFW_LEVELS
from .settings_manager import settings
from ..utils.utils import fuzzy_match
from ..utils.metadata_manager import MetadataManager
from .model_query import FilterCriteria, ModelCacheRepository, ModelFilterSet, SearchStrategy, SettingsProvider
from .settings_manager import settings as default_settings
logger = logging.getLogger(__name__)
class BaseModelService(ABC):
"""Base service class for all model types"""
def __init__(self, model_type: str, scanner, metadata_class: Type[BaseModelMetadata]):
"""Initialize the service
def __init__(
self,
model_type: str,
scanner,
metadata_class: Type[BaseModelMetadata],
*,
cache_repository: Optional[ModelCacheRepository] = None,
filter_set: Optional[ModelFilterSet] = None,
search_strategy: Optional[SearchStrategy] = None,
settings_provider: Optional[SettingsProvider] = None,
):
"""Initialize the service.
Args:
model_type: Type of model (lora, checkpoint, etc.)
scanner: Model scanner instance
metadata_class: Metadata class for this model type
model_type: Type of model (lora, checkpoint, etc.).
scanner: Model scanner instance.
metadata_class: Metadata class for this model type.
cache_repository: Custom repository for cache access (primarily for tests).
filter_set: Filter component controlling folder/tag/favorites logic.
search_strategy: Search component for fuzzy/text matching.
settings_provider: Settings object; defaults to the global settings manager.
"""
self.model_type = model_type
self.scanner = scanner
self.metadata_class = metadata_class
self.settings = settings_provider or default_settings
self.cache_repository = cache_repository or ModelCacheRepository(scanner)
self.filter_set = filter_set or ModelFilterSet(self.settings)
self.search_strategy = search_strategy or SearchStrategy()
async def get_paginated_data(self, page: int, page_size: int, sort_by: str = 'name',
folder: str = None, search: str = None, fuzzy_search: bool = False,
base_models: list = None, tags: list = None,
search_options: dict = None, hash_filters: dict = None,
favorites_only: bool = False, **kwargs) -> Dict:
"""Get paginated and filtered model data
Args:
page: Page number (1-based)
page_size: Number of items per page
sort_by: Sort criteria, e.g. 'name', 'name:asc', 'name:desc', 'date', 'date:asc', 'date:desc'
folder: Folder filter
search: Search term
fuzzy_search: Whether to use fuzzy search
base_models: List of base models to filter by
tags: List of tags to filter by
search_options: Search options dict
hash_filters: Hash filtering options
favorites_only: Filter for favorites only
**kwargs: Additional model-specific filters
Returns:
Dict containing paginated results
"""
cache = await self.scanner.get_cached_data()
async def get_paginated_data(
self,
page: int,
page_size: int,
sort_by: str = 'name',
folder: str = None,
search: str = None,
fuzzy_search: bool = False,
base_models: list = None,
tags: list = None,
search_options: dict = None,
hash_filters: dict = None,
favorites_only: bool = False,
**kwargs,
) -> Dict:
"""Get paginated and filtered model data"""
sort_params = self.cache_repository.parse_sort(sort_by)
sorted_data = await self.cache_repository.fetch_sorted(sort_params)
# Parse sort_by into sort_key and order
if ':' in sort_by:
sort_key, order = sort_by.split(':', 1)
sort_key = sort_key.strip()
order = order.strip().lower()
if order not in ('asc', 'desc'):
order = 'asc'
else:
sort_key = sort_by.strip()
order = 'asc'
# Get default search options if not provided
if search_options is None:
search_options = {
'filename': True,
'modelname': True,
'tags': False,
'recursive': True,
}
# Get the base data set using new sort logic
filtered_data = await cache.get_sorted_data(sort_key, order)
# Apply hash filtering if provided (highest priority)
if hash_filters:
filtered_data = await self._apply_hash_filters(filtered_data, hash_filters)
# Jump to pagination for hash filters
filtered_data = await self._apply_hash_filters(sorted_data, hash_filters)
return self._paginate(filtered_data, page, page_size)
# Apply common filters
filtered_data = await self._apply_common_filters(
filtered_data, folder, base_models, tags, favorites_only, search_options
sorted_data,
folder=folder,
base_models=base_models,
tags=tags,
favorites_only=favorites_only,
search_options=search_options,
)
# Apply search filtering
if search:
filtered_data = await self._apply_search_filters(
filtered_data, search, fuzzy_search, search_options
filtered_data,
search,
fuzzy_search,
search_options,
)
# Apply model-specific filters
filtered_data = await self._apply_specific_filters(filtered_data, **kwargs)
return self._paginate(filtered_data, page, page_size)
async def _apply_hash_filters(self, data: List[Dict], hash_filters: Dict) -> List[Dict]:
"""Apply hash-based filtering"""
@@ -120,113 +110,36 @@ class BaseModelService(ABC):
return data
async def _apply_common_filters(self, data: List[Dict], folder: str = None,
base_models: list = None, tags: list = None,
favorites_only: bool = False, search_options: dict = None) -> List[Dict]:
async def _apply_common_filters(
self,
data: List[Dict],
folder: str = None,
base_models: list = None,
tags: list = None,
favorites_only: bool = False,
search_options: dict = None,
) -> List[Dict]:
"""Apply common filters that work across all model types"""
# Apply SFW filtering if enabled in settings
if settings.get('show_only_sfw', False):
data = [
item for item in data
if not item.get('preview_nsfw_level') or item.get('preview_nsfw_level') < NSFW_LEVELS['R']
]
# Apply favorites filtering if enabled
if favorites_only:
data = [
item for item in data
if item.get('favorite', False) is True
]
# Apply folder filtering
if folder is not None:
if search_options and search_options.get('recursive', True):
# Recursive folder filtering - include all subfolders
# Ensure we match exact folder or its subfolders by checking path boundaries
if folder == "":
# Empty folder means root - include all items
pass # Don't filter anything
else:
# Add trailing slash to ensure we match folder boundaries correctly
folder_with_separator = folder + "/"
data = [
item for item in data
if (item['folder'] == folder or
item['folder'].startswith(folder_with_separator))
]
else:
# Exact folder filtering
data = [
item for item in data
if item['folder'] == folder
]
# Apply base model filtering
if base_models and len(base_models) > 0:
data = [
item for item in data
if item.get('base_model') in base_models
]
# Apply tag filtering
if tags and len(tags) > 0:
data = [
item for item in data
if any(tag in item.get('tags', []) for tag in tags)
]
return data
normalized_options = self.search_strategy.normalize_options(search_options)
criteria = FilterCriteria(
folder=folder,
base_models=base_models,
tags=tags,
favorites_only=favorites_only,
search_options=normalized_options,
)
return self.filter_set.apply(data, criteria)
async def _apply_search_filters(self, data: List[Dict], search: str,
fuzzy_search: bool, search_options: dict) -> List[Dict]:
async def _apply_search_filters(
self,
data: List[Dict],
search: str,
fuzzy_search: bool,
search_options: dict,
) -> List[Dict]:
"""Apply search filtering"""
search_results = []
for item in data:
# Search by file name
if search_options.get('filename', True):
if fuzzy_search:
if fuzzy_match(item.get('file_name', ''), search):
search_results.append(item)
continue
elif search.lower() in item.get('file_name', '').lower():
search_results.append(item)
continue
# Search by model name
if search_options.get('modelname', True):
if fuzzy_search:
if fuzzy_match(item.get('model_name', ''), search):
search_results.append(item)
continue
elif search.lower() in item.get('model_name', '').lower():
search_results.append(item)
continue
# Search by tags
if search_options.get('tags', False) and 'tags' in item:
if any((fuzzy_match(tag, search) if fuzzy_search else search.lower() in tag.lower())
for tag in item['tags']):
search_results.append(item)
continue
# Search by creator
civitai = item.get('civitai')
creator_username = ''
if civitai and isinstance(civitai, dict):
creator = civitai.get('creator')
if creator and isinstance(creator, dict):
creator_username = creator.get('username', '')
if search_options.get('creator', False) and creator_username:
if fuzzy_search:
if fuzzy_match(creator_username, search):
search_results.append(item)
continue
elif search.lower() in creator_username.lower():
search_results.append(item)
continue
return search_results
normalized_options = self.search_strategy.normalize_options(search_options)
return self.search_strategy.apply(data, search, normalized_options, fuzzy_search)
async def _apply_specific_filters(self, data: List[Dict], **kwargs) -> List[Dict]:
"""Apply model-specific filters - to be overridden by subclasses if needed"""
@@ -284,6 +197,18 @@ class BaseModelService(ABC):
"""Get model root directories"""
return self.scanner.get_model_roots()
def filter_civitai_data(self, data: Dict, minimal: bool = False) -> Dict:
"""Filter relevant fields from CivitAI data"""
if not data:
return {}
fields = ["id", "modelId", "name", "trainedWords"] if minimal else [
"id", "modelId", "name", "createdAt", "updatedAt",
"publishedAt", "trainedWords", "baseModel", "description",
"model", "images", "customImages", "creator"
]
return {k: data[k] for k in fields if k in data}
async def get_folder_tree(self, model_root: str) -> Dict:
"""Get hierarchical folder tree for a specific model root"""
cache = await self.scanner.get_cached_data()
@@ -389,24 +314,24 @@ class BaseModelService(ABC):
return {'civitai_url': None, 'model_id': None, 'version_id': None}
async def get_model_metadata(self, file_path: str) -> Optional[Dict]:
"""Get filtered CivitAI metadata for a model by file path"""
cache = await self.scanner.get_cached_data()
for model in cache.raw_data:
if model.get('file_path') == file_path:
return ModelRouteUtils.filter_civitai_data(model.get("civitai", {}))
return None
"""Load full metadata for a single model.
Listing/search endpoints return lightweight cache entries; this method performs
a lazy read of the on-disk metadata snapshot when callers need full detail.
"""
metadata, should_skip = await MetadataManager.load_metadata(file_path, self.metadata_class)
if should_skip or metadata is None:
return None
return self.filter_civitai_data(metadata.to_dict().get("civitai", {}))
async def get_model_description(self, file_path: str) -> Optional[str]:
"""Get model description by file path"""
cache = await self.scanner.get_cached_data()
for model in cache.raw_data:
if model.get('file_path') == file_path:
return model.get('modelDescription', '')
return None
"""Return the stored modelDescription field for a model."""
metadata, should_skip = await MetadataManager.load_metadata(file_path, self.metadata_class)
if should_skip or metadata is None:
return None
return metadata.modelDescription or ''
async def search_relative_paths(self, search_term: str, limit: int = 15) -> List[str]:
"""Search model relative file paths for autocomplete functionality"""

View File

@@ -1,11 +1,10 @@
import os
import logging
from typing import Dict, List, Optional
from typing import Dict
from .base_model_service import BaseModelService
from ..utils.models import CheckpointMetadata
from ..config import config
from ..utils.routes_common import ModelRouteUtils
logger = logging.getLogger(__name__)
@@ -38,7 +37,7 @@ class CheckpointService(BaseModelService):
"notes": checkpoint_data.get("notes", ""),
"model_type": checkpoint_data.get("model_type", "checkpoint"),
"favorite": checkpoint_data.get("favorite", False),
"civitai": ModelRouteUtils.filter_civitai_data(checkpoint_data.get("civitai", {}), minimal=True)
"civitai": self.filter_civitai_data(checkpoint_data.get("civitai", {}), minimal=True)
}
def find_duplicate_hashes(self) -> Dict:

View File

@@ -1,4 +1,5 @@
import os
import copy
import logging
import asyncio
from typing import Optional, Dict, Tuple, List
@@ -31,6 +32,24 @@ class CivitaiClient:
self._initialized = True
self.base_url = "https://civitai.com/api/v1"
@staticmethod
def _remove_comfy_metadata(model_version: Optional[Dict]) -> None:
"""Remove Comfy-specific metadata from model version images."""
if not isinstance(model_version, dict):
return
images = model_version.get("images")
if not isinstance(images, list):
return
for image in images:
if not isinstance(image, dict):
continue
meta = image.get("meta")
if isinstance(meta, dict) and "comfy" in meta:
meta.pop("comfy", None)
async def download_file(self, url: str, save_dir: str, default_filename: str, progress_callback=None) -> Tuple[bool, str]:
"""Download file with resumable downloads and retry mechanism
@@ -80,10 +99,11 @@ class CivitaiClient:
# Enrich version_info with model data
result['model']['description'] = data.get("description")
result['model']['tags'] = data.get("tags", [])
# Add creator from model data
result['creator'] = data.get("creator")
self._remove_comfy_metadata(result)
return result, None
# Handle specific error cases
@@ -176,7 +196,8 @@ class CivitaiClient:
version['model']['description'] = model_data.get("description")
version['model']['tags'] = model_data.get("tags", [])
version['creator'] = model_data.get("creator")
self._remove_comfy_metadata(version)
return version
# Case 2: model_id is provided (with or without version_id)
@@ -189,31 +210,77 @@ class CivitaiClient:
)
if not success:
return None
model_versions = data.get('modelVersions', [])
# Step 2: Determine the version_id to use
target_version_id = version_id
if target_version_id is None:
target_version_id = model_versions[0].get('id')
# Step 3: Get detailed version info using the version_id
success, version = await downloader.make_request(
'GET',
f"{self.base_url}/model-versions/{target_version_id}",
use_auth=True
)
if not success:
if not model_versions:
logger.warning(f"No model versions found for model {model_id}")
return None
# Step 2: Determine the target version entry to use
target_version = None
if version_id is not None:
target_version = next(
(item for item in model_versions if item.get('id') == version_id),
None
)
if target_version is None:
logger.warning(
f"Version {version_id} not found for model {model_id}, defaulting to first version"
)
if target_version is None:
target_version = model_versions[0]
target_version_id = target_version.get('id')
# Step 3: Get detailed version info using the SHA256 hash
model_hash = None
for file_info in target_version.get('files', []):
if file_info.get('type') == 'Model' and file_info.get('primary'):
model_hash = file_info.get('hashes', {}).get('SHA256')
if model_hash:
break
version = None
if model_hash:
success, version = await downloader.make_request(
'GET',
f"{self.base_url}/model-versions/by-hash/{model_hash}",
use_auth=True
)
if not success:
logger.warning(
f"Failed to fetch version by hash for model {model_id} version {target_version_id}: {version}"
)
version = None
else:
logger.warning(
f"No primary model hash found for model {model_id} version {target_version_id}"
)
if version is None:
version = copy.deepcopy(target_version)
version.pop('index', None)
version['modelId'] = model_id
version['model'] = {
'name': data.get('name'),
'type': data.get('type'),
'nsfw': data.get('nsfw'),
'poi': data.get('poi')
}
# Step 4: Enrich version_info with model data
# Add description and tags from model data
version['model']['description'] = data.get("description")
version['model']['tags'] = data.get("tags", [])
model_info = version.get('model')
if not isinstance(model_info, dict):
model_info = {}
version['model'] = model_info
model_info['description'] = data.get("description")
model_info['tags'] = data.get("tags", [])
# Add creator from model data
version['creator'] = data.get("creator")
self._remove_comfy_metadata(version)
return version
# Case 3: Neither model_id nor version_id provided
@@ -249,6 +316,7 @@ class CivitaiClient:
if success:
logger.debug(f"Successfully fetched model version info for: {version_id}")
self._remove_comfy_metadata(result)
return result, None
# Handle specific error cases

View File

@@ -0,0 +1,100 @@
"""Service wrapper for coordinating download lifecycle events."""
from __future__ import annotations
import logging
from typing import Any, Awaitable, Callable, Dict, Optional
logger = logging.getLogger(__name__)
class DownloadCoordinator:
"""Manage download scheduling, cancellation and introspection."""
def __init__(
self,
*,
ws_manager,
download_manager_factory: Callable[[], Awaitable],
) -> None:
self._ws_manager = ws_manager
self._download_manager_factory = download_manager_factory
async def schedule_download(self, payload: Dict[str, Any]) -> Dict[str, Any]:
"""Schedule a download using the provided payload."""
download_manager = await self._download_manager_factory()
download_id = payload.get("download_id") or self._ws_manager.generate_download_id()
payload.setdefault("download_id", download_id)
async def progress_callback(progress: Any) -> None:
await self._ws_manager.broadcast_download_progress(
download_id,
{
"status": "progress",
"progress": progress,
"download_id": download_id,
},
)
model_id = self._parse_optional_int(payload.get("model_id"), "model_id")
model_version_id = self._parse_optional_int(
payload.get("model_version_id"), "model_version_id"
)
if model_id is None and model_version_id is None:
raise ValueError(
"Missing required parameter: Please provide either 'model_id' or 'model_version_id'"
)
result = await download_manager.download_from_civitai(
model_id=model_id,
model_version_id=model_version_id,
save_dir=payload.get("model_root"),
relative_path=payload.get("relative_path", ""),
use_default_paths=payload.get("use_default_paths", False),
progress_callback=progress_callback,
download_id=download_id,
source=payload.get("source"),
)
result["download_id"] = download_id
return result
async def cancel_download(self, download_id: str) -> Dict[str, Any]:
"""Cancel an active download and emit a broadcast event."""
download_manager = await self._download_manager_factory()
result = await download_manager.cancel_download(download_id)
await self._ws_manager.broadcast_download_progress(
download_id,
{
"status": "cancelled",
"progress": 0,
"download_id": download_id,
"message": "Download cancelled by user",
},
)
return result
async def list_active_downloads(self) -> Dict[str, Any]:
"""Return the active download map from the underlying manager."""
download_manager = await self._download_manager_factory()
return await download_manager.get_active_downloads()
def _parse_optional_int(self, value: Any, field: str) -> Optional[int]:
"""Parse an optional integer from user input."""
if value is None or value == "":
return None
try:
return int(value)
except (TypeError, ValueError) as exc:
raise ValueError(f"Invalid {field}: Must be an integer") from exc

View File

@@ -3,7 +3,7 @@ import os
import asyncio
from collections import OrderedDict
import uuid
from typing import Dict
from typing import Dict, List
from ..utils.models import LoraMetadata, CheckpointMetadata, EmbeddingMetadata
from ..utils.constants import CARD_PREVIEW_WIDTH, VALID_LORA_TYPES, CIVITAI_MODEL_TAGS
from ..utils.exif_utils import ExifUtils
@@ -294,7 +294,18 @@ class DownloadManager:
file_info = next((f for f in version_info.get('files', []) if f.get('primary')), None)
if not file_info:
return {'success': False, 'error': 'No primary file found in metadata'}
if not file_info.get('downloadUrl'):
mirrors = file_info.get('mirrors') or []
download_urls = []
if mirrors:
for mirror in mirrors:
if mirror.get('deletedAt') is None and mirror.get('url'):
download_urls.append(mirror['url'])
else:
download_url = file_info.get('downloadUrl')
if download_url:
download_urls.append(download_url)
if not download_urls:
return {'success': False, 'error': 'No download URL found for primary file'}
# 3. Prepare download
@@ -314,7 +325,7 @@ class DownloadManager:
# 6. Start download process
result = await self._execute_download(
download_url=file_info.get('downloadUrl', ''),
download_urls=download_urls,
save_dir=save_dir,
metadata=metadata,
version_info=version_info,
@@ -388,11 +399,14 @@ class DownloadManager:
formatted_path = formatted_path.replace('{base_model}', mapped_base_model)
formatted_path = formatted_path.replace('{first_tag}', first_tag)
formatted_path = formatted_path.replace('{author}', author)
if model_type == 'embedding':
formatted_path = formatted_path.replace(' ', '_')
return formatted_path
async def _execute_download(self, download_url: str, save_dir: str,
metadata, version_info: Dict,
async def _execute_download(self, download_urls: List[str], save_dir: str,
metadata, version_info: Dict,
relative_path: str, progress_callback=None,
model_type: str = "lora", download_id: str = None) -> Dict:
"""Execute the actual download process including preview images and model files"""
@@ -503,33 +517,44 @@ class DownloadManager:
# Download model file with progress tracking using downloader
downloader = await get_downloader()
# Determine if the download URL is from Civitai
use_auth = download_url.startswith("https://civitai.com/api/download/")
success, result = await downloader.download_file(
download_url,
save_path, # Use full path instead of separate dir and filename
progress_callback=lambda p: self._handle_download_progress(p, progress_callback),
use_auth=use_auth # Only use authentication for Civitai downloads
)
last_error = None
for download_url in download_urls:
use_auth = download_url.startswith("https://civitai.com/api/download/")
success, result = await downloader.download_file(
download_url,
save_path, # Use full path instead of separate dir and filename
progress_callback=lambda p: self._handle_download_progress(p, progress_callback),
use_auth=use_auth # Only use authentication for Civitai downloads
)
if not success:
if success:
break
last_error = result
if os.path.exists(save_path):
try:
os.remove(save_path)
except Exception as e:
logger.warning(f"Failed to remove incomplete file {save_path}: {e}")
else:
# Clean up files on failure, but preserve .part file for resume
cleanup_files = [metadata_path]
if metadata.preview_url and os.path.exists(metadata.preview_url):
cleanup_files.append(metadata.preview_url)
preview_path_value = getattr(metadata, 'preview_url', None)
if preview_path_value and os.path.exists(preview_path_value):
cleanup_files.append(preview_path_value)
for path in cleanup_files:
if path and os.path.exists(path):
try:
os.remove(path)
except Exception as e:
logger.warning(f"Failed to cleanup file {path}: {e}")
# Log but don't remove .part file to allow resume
if os.path.exists(part_path):
logger.info(f"Preserving partial download for resume: {part_path}")
return {'success': False, 'error': result}
return {'success': False, 'error': last_error or 'Failed to download file'}
# 4. Update file information (size and modified time)
metadata.update_file_info(save_path)

View File

@@ -1,11 +1,10 @@
import os
import logging
from typing import Dict, List, Optional
from typing import Dict
from .base_model_service import BaseModelService
from ..utils.models import EmbeddingMetadata
from ..config import config
from ..utils.routes_common import ModelRouteUtils
logger = logging.getLogger(__name__)
@@ -38,7 +37,7 @@ class EmbeddingService(BaseModelService):
"notes": embedding_data.get("notes", ""),
"model_type": embedding_data.get("model_type", "embedding"),
"favorite": embedding_data.get("favorite", False),
"civitai": ModelRouteUtils.filter_civitai_data(embedding_data.get("civitai", {}), minimal=True)
"civitai": self.filter_civitai_data(embedding_data.get("civitai", {}), minimal=True)
}
def find_duplicate_hashes(self) -> Dict:

View File

@@ -0,0 +1,296 @@
"""Service for cleaning up example image folders."""
from __future__ import annotations
import asyncio
import logging
import os
import shutil
from dataclasses import dataclass
from pathlib import Path
from typing import Dict, List, Tuple
from .service_registry import ServiceRegistry
from .settings_manager import settings
from ..utils.example_images_paths import iter_library_roots
logger = logging.getLogger(__name__)
@dataclass(slots=True)
class CleanupResult:
"""Structured result returned from cleanup operations."""
success: bool
checked_folders: int
moved_empty_folders: int
moved_orphaned_folders: int
skipped_non_hash: int
move_failures: int
errors: List[str]
deleted_root: str | None
partial_success: bool
def to_dict(self) -> Dict[str, object]:
"""Convert the dataclass to a serialisable dictionary."""
data = {
"success": self.success,
"checked_folders": self.checked_folders,
"moved_empty_folders": self.moved_empty_folders,
"moved_orphaned_folders": self.moved_orphaned_folders,
"moved_total": self.moved_empty_folders + self.moved_orphaned_folders,
"skipped_non_hash": self.skipped_non_hash,
"move_failures": self.move_failures,
"errors": self.errors,
"deleted_root": self.deleted_root,
"partial_success": self.partial_success,
}
return data
class ExampleImagesCleanupService:
"""Encapsulates logic for cleaning example image folders."""
DELETED_FOLDER_NAME = "_deleted"
def __init__(self, deleted_folder_name: str | None = None) -> None:
self._deleted_folder_name = deleted_folder_name or self.DELETED_FOLDER_NAME
async def cleanup_example_image_folders(self) -> Dict[str, object]:
"""Clean empty or orphaned example image folders by moving them under a deleted bucket."""
example_images_path = settings.get("example_images_path")
if not example_images_path:
logger.debug("Cleanup skipped: example images path not configured")
return {
"success": False,
"error": "Example images path is not configured.",
"error_code": "path_not_configured",
}
base_root = Path(example_images_path)
if not base_root.exists():
logger.debug("Cleanup skipped: example images path missing -> %s", base_root)
return {
"success": False,
"error": "Example images path does not exist.",
"error_code": "path_not_found",
}
try:
lora_scanner = await ServiceRegistry.get_lora_scanner()
checkpoint_scanner = await ServiceRegistry.get_checkpoint_scanner()
embedding_scanner = await ServiceRegistry.get_embedding_scanner()
except Exception as exc: # pragma: no cover - defensive guard
logger.error("Failed to acquire scanners for cleanup: %s", exc, exc_info=True)
return {
"success": False,
"error": f"Failed to load model scanners: {exc}",
"error_code": "scanner_initialization_failed",
}
checked_folders = 0
moved_empty = 0
moved_orphaned = 0
skipped_non_hash = 0
move_failures = 0
errors: List[str] = []
resolved_base = base_root.resolve()
library_paths: List[Tuple[str, Path]] = []
processed_paths = {resolved_base}
for library_name, library_path in iter_library_roots():
if not library_path:
continue
library_root = Path(library_path)
try:
resolved = library_root.resolve()
except FileNotFoundError:
continue
if resolved in processed_paths:
continue
if not library_root.exists():
logger.debug(
"Skipping cleanup for library '%s': folder missing (%s)",
library_name,
library_root,
)
continue
processed_paths.add(resolved)
library_paths.append((library_name, library_root))
deleted_roots: List[Path] = []
# Build list of (label, root) pairs including the base root for legacy layouts
cleanup_targets: List[Tuple[str, Path]] = [("__base__", base_root)] + library_paths
library_root_set = {root.resolve() for _, root in library_paths}
for label, root_path in cleanup_targets:
deleted_bucket = root_path / self._deleted_folder_name
deleted_bucket.mkdir(exist_ok=True)
deleted_roots.append(deleted_bucket)
for entry in os.scandir(root_path):
if not entry.is_dir(follow_symlinks=False):
continue
if entry.name == self._deleted_folder_name:
continue
entry_path = Path(entry.path)
if label == "__base__":
try:
resolved_entry = entry_path.resolve()
except FileNotFoundError:
continue
if resolved_entry in library_root_set:
# Skip library-specific folders tracked separately
continue
checked_folders += 1
try:
if self._is_folder_empty(entry_path):
if await self._remove_empty_folder(entry_path):
moved_empty += 1
else:
move_failures += 1
continue
if not self._is_hash_folder(entry.name):
skipped_non_hash += 1
continue
hash_exists = (
lora_scanner.has_hash(entry.name)
or checkpoint_scanner.has_hash(entry.name)
or embedding_scanner.has_hash(entry.name)
)
if not hash_exists:
if await self._move_folder(entry_path, deleted_bucket):
moved_orphaned += 1
else:
move_failures += 1
except Exception as exc: # pragma: no cover - filesystem guard
move_failures += 1
error_message = f"{entry.name}: {exc}"
errors.append(error_message)
logger.error(
"Error processing example images folder %s: %s",
entry_path,
exc,
exc_info=True,
)
partial_success = move_failures > 0 and (moved_empty > 0 or moved_orphaned > 0)
success = move_failures == 0 and not errors
result = CleanupResult(
success=success,
checked_folders=checked_folders,
moved_empty_folders=moved_empty,
moved_orphaned_folders=moved_orphaned,
skipped_non_hash=skipped_non_hash,
move_failures=move_failures,
errors=errors,
deleted_root=str(deleted_roots[0]) if deleted_roots else None,
partial_success=partial_success,
)
summary = result.to_dict()
summary["deleted_roots"] = [str(path) for path in deleted_roots]
if success:
logger.info(
"Example images cleanup complete: checked=%s, moved_empty=%s, moved_orphaned=%s",
checked_folders,
moved_empty,
moved_orphaned,
)
elif partial_success:
logger.warning(
"Example images cleanup partially complete: moved=%s, failures=%s",
summary["moved_total"],
move_failures,
)
else:
logger.error(
"Example images cleanup failed: move_failures=%s, errors=%s",
move_failures,
errors,
)
return summary
@staticmethod
def _is_folder_empty(folder_path: Path) -> bool:
try:
with os.scandir(folder_path) as iterator:
return not any(iterator)
except FileNotFoundError:
return True
except OSError as exc: # pragma: no cover - defensive guard
logger.debug("Failed to inspect folder %s: %s", folder_path, exc)
return False
@staticmethod
def _is_hash_folder(name: str) -> bool:
if len(name) != 64:
return False
hex_chars = set("0123456789abcdefABCDEF")
return all(char in hex_chars for char in name)
async def _remove_empty_folder(self, folder_path: Path) -> bool:
loop = asyncio.get_running_loop()
try:
await loop.run_in_executor(
None,
shutil.rmtree,
str(folder_path),
)
logger.debug("Removed empty example images folder %s", folder_path)
return True
except Exception as exc: # pragma: no cover - filesystem guard
logger.error("Failed to remove empty example images folder %s: %s", folder_path, exc, exc_info=True)
return False
async def _move_folder(self, folder_path: Path, deleted_bucket: Path) -> bool:
destination = self._build_destination(folder_path.name, deleted_bucket)
loop = asyncio.get_running_loop()
try:
await loop.run_in_executor(
None,
shutil.move,
str(folder_path),
str(destination),
)
logger.debug("Moved example images folder %s -> %s", folder_path, destination)
return True
except Exception as exc: # pragma: no cover - filesystem guard
logger.error(
"Failed to move example images folder %s to %s: %s",
folder_path,
destination,
exc,
exc_info=True,
)
return False
def _build_destination(self, folder_name: str, deleted_bucket: Path) -> Path:
destination = deleted_bucket / folder_name
suffix = 1
while destination.exists():
destination = deleted_bucket / f"{folder_name}_{suffix}"
suffix += 1
return destination
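A usage sketch; the service is self-contained, so one awaited call drives the whole sweep and returns the serialised CleanupResult dictionary (keys as produced by to_dict, plus deleted_roots):

    service = ExampleImagesCleanupService()
    summary = await service.cleanup_example_image_folders()
    if summary.get("success"):
        logger.info("Moved %s folders into %s", summary["moved_total"], summary["deleted_root"])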

View File

@@ -5,7 +5,6 @@ from typing import Dict, List, Optional
from .base_model_service import BaseModelService
from ..utils.models import LoraMetadata
from ..config import config
from ..utils.routes_common import ModelRouteUtils
logger = logging.getLogger(__name__)
@@ -38,7 +37,7 @@ class LoraService(BaseModelService):
"usage_tips": lora_data.get("usage_tips", ""),
"notes": lora_data.get("notes", ""),
"favorite": lora_data.get("favorite", False),
"civitai": ModelRouteUtils.filter_civitai_data(lora_data.get("civitai", {}), minimal=True)
"civitai": self.filter_civitai_data(lora_data.get("civitai", {}), minimal=True)
}
async def _apply_specific_filters(self, data: List[Dict], **kwargs) -> List[Dict]:

View File

@@ -37,7 +37,7 @@ async def initialize_metadata_providers():
sqlite_provider = SQLiteModelMetadataProvider(db_path)
provider_manager.register_provider('sqlite', sqlite_provider)
providers.append(('sqlite', sqlite_provider))
logger.info(f"SQLite metadata provider registered with database: {db_path}")
logger.debug(f"SQLite metadata provider registered with database: {db_path}")
else:
logger.warning("Metadata archive database is enabled but database file not found")
except Exception as e:
@@ -72,7 +72,7 @@ async def initialize_metadata_providers():
if ordered_providers:
fallback_provider = FallbackMetadataProvider(ordered_providers)
provider_manager.register_provider('fallback', fallback_provider, is_default=True)
logger.info(f"Fallback metadata provider registered with {len(ordered_providers)} providers, Civitai API first")
logger.debug(f"Fallback metadata provider registered with {len(ordered_providers)} providers, Civitai API first")
elif len(providers) == 1:
# Only one provider available, set it as default
provider_name, provider = providers[0]

View File

@@ -0,0 +1,356 @@
"""Services for synchronising metadata with remote providers."""
from __future__ import annotations
import json
import logging
import os
from datetime import datetime
from typing import Any, Awaitable, Callable, Dict, Iterable, Optional
from ..services.settings_manager import SettingsManager
from ..utils.model_utils import determine_base_model
logger = logging.getLogger(__name__)
class MetadataProviderProtocol:
"""Subset of metadata provider interface consumed by the sync service."""
async def get_model_by_hash(self, sha256: str) -> tuple[Optional[Dict[str, Any]], Optional[str]]:
...
async def get_model_version(
self, model_id: int, model_version_id: Optional[int]
) -> Optional[Dict[str, Any]]:
...
class MetadataSyncService:
"""High level orchestration for metadata synchronisation flows."""
def __init__(
self,
*,
metadata_manager,
preview_service,
settings: SettingsManager,
default_metadata_provider_factory: Callable[[], Awaitable[MetadataProviderProtocol]],
metadata_provider_selector: Callable[[str], Awaitable[MetadataProviderProtocol]],
) -> None:
self._metadata_manager = metadata_manager
self._preview_service = preview_service
self._settings = settings
self._get_default_provider = default_metadata_provider_factory
self._get_provider = metadata_provider_selector
async def load_local_metadata(self, metadata_path: str) -> Dict[str, Any]:
"""Load metadata JSON from disk, returning an empty structure when missing."""
if not os.path.exists(metadata_path):
return {}
try:
with open(metadata_path, "r", encoding="utf-8") as handle:
return json.load(handle)
except Exception as exc: # pragma: no cover - defensive logging
logger.error("Error loading metadata from %s: %s", metadata_path, exc)
return {}
async def mark_not_found_on_civitai(
self, metadata_path: str, local_metadata: Dict[str, Any]
) -> None:
"""Persist the not-found flag for a metadata payload."""
local_metadata["from_civitai"] = False
await self._metadata_manager.save_metadata(metadata_path, local_metadata)
@staticmethod
def is_civitai_api_metadata(meta: Dict[str, Any]) -> bool:
"""Determine if the metadata originated from the CivitAI public API."""
if not isinstance(meta, dict):
return False
files = meta.get("files")
images = meta.get("images")
source = meta.get("source")
return bool(files) and bool(images) and source != "archive_db"
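    # Illustration (hypothetical payloads): public-API responses carry both
    # "files" and "images", while archive rows are tagged source == "archive_db":
    #   is_civitai_api_metadata({"files": [...], "images": [...]})  -> True
    #   is_civitai_api_metadata({"files": [...], "images": [...],
    #                            "source": "archive_db"})           -> False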
async def update_model_metadata(
self,
metadata_path: str,
local_metadata: Dict[str, Any],
civitai_metadata: Dict[str, Any],
metadata_provider: Optional[MetadataProviderProtocol] = None,
) -> Dict[str, Any]:
"""Merge remote metadata into the local record and persist the result."""
existing_civitai = local_metadata.get("civitai") or {}
if (
civitai_metadata.get("source") == "archive_db"
and self.is_civitai_api_metadata(existing_civitai)
):
logger.info(
"Skip civitai update for %s (%s)",
local_metadata.get("model_name", ""),
existing_civitai.get("name", ""),
)
else:
merged_civitai = existing_civitai.copy()
merged_civitai.update(civitai_metadata)
if civitai_metadata.get("source") == "archive_db":
model_name = civitai_metadata.get("model", {}).get("name", "")
version_name = civitai_metadata.get("name", "")
logger.info(
"Recovered metadata from archive_db for deleted model: %s (%s)",
model_name,
version_name,
)
if "trainedWords" in existing_civitai:
existing_trained = existing_civitai.get("trainedWords", [])
new_trained = civitai_metadata.get("trainedWords", [])
merged_trained = list(set(existing_trained + new_trained))
merged_civitai["trainedWords"] = merged_trained
local_metadata["civitai"] = merged_civitai
if "model" in civitai_metadata and civitai_metadata["model"]:
model_data = civitai_metadata["model"]
if model_data.get("name"):
local_metadata["model_name"] = model_data["name"]
if not local_metadata.get("modelDescription") and model_data.get("description"):
local_metadata["modelDescription"] = model_data["description"]
if not local_metadata.get("tags") and model_data.get("tags"):
local_metadata["tags"] = model_data["tags"]
if model_data.get("creator") and not local_metadata.get("civitai", {}).get(
"creator"
):
local_metadata.setdefault("civitai", {})["creator"] = model_data["creator"]
local_metadata["base_model"] = determine_base_model(
civitai_metadata.get("baseModel")
)
await self._preview_service.ensure_preview_for_metadata(
metadata_path, local_metadata, civitai_metadata.get("images", [])
)
await self._metadata_manager.save_metadata(metadata_path, local_metadata)
return local_metadata
async def fetch_and_update_model(
self,
*,
sha256: str,
file_path: str,
model_data: Dict[str, Any],
update_cache_func: Callable[[str, str, Dict[str, Any]], Awaitable[bool]],
) -> tuple[bool, Optional[str]]:
"""Fetch metadata for a model and update both disk and cache state."""
if not isinstance(model_data, dict):
error = f"Invalid model_data type: {type(model_data)}"
logger.error(error)
return False, error
metadata_path = os.path.splitext(file_path)[0] + ".metadata.json"
enable_archive = self._settings.get("enable_metadata_archive_db", False)
try:
if model_data.get("civitai_deleted") is True:
if not enable_archive or model_data.get("db_checked") is True:
if not enable_archive:
error_msg = "CivitAI model is deleted and metadata archive DB is not enabled"
else:
error_msg = "CivitAI model is deleted and not found in metadata archive DB"
return (False, error_msg)
metadata_provider = await self._get_provider("sqlite")
else:
metadata_provider = await self._get_default_provider()
civitai_metadata, error = await metadata_provider.get_model_by_hash(sha256)
if not civitai_metadata:
if error == "Model not found":
model_data["from_civitai"] = False
model_data["civitai_deleted"] = True
model_data["db_checked"] = enable_archive
model_data["last_checked_at"] = datetime.now().timestamp()
data_to_save = model_data.copy()
data_to_save.pop("folder", None)
await self._metadata_manager.save_metadata(file_path, data_to_save)
error_msg = (
f"Error fetching metadata: {error} (model_name={model_data.get('model_name', '')})"
)
logger.error(error_msg)
return False, error_msg
model_data["from_civitai"] = True
model_data["civitai_deleted"] = civitai_metadata.get("source") == "archive_db"
model_data["db_checked"] = enable_archive
model_data["last_checked_at"] = datetime.now().timestamp()
local_metadata = model_data.copy()
local_metadata.pop("folder", None)
await self.update_model_metadata(
metadata_path,
local_metadata,
civitai_metadata,
metadata_provider,
)
update_payload = {
"model_name": local_metadata.get("model_name"),
"preview_url": local_metadata.get("preview_url"),
"civitai": local_metadata.get("civitai"),
}
model_data.update(update_payload)
await update_cache_func(file_path, file_path, local_metadata)
return True, None
except KeyError as exc:
error_msg = f"Error fetching metadata - Missing key: {exc} in model_data={model_data}"
logger.error(error_msg)
return False, error_msg
except Exception as exc: # pragma: no cover - error path
error_msg = f"Error fetching metadata: {exc}"
logger.error(error_msg, exc_info=True)
return False, error_msg
async def fetch_metadata_by_sha(
self, sha256: str, metadata_provider: Optional[MetadataProviderProtocol] = None
) -> tuple[Optional[Dict[str, Any]], Optional[str]]:
"""Fetch metadata for a SHA256 hash from the configured provider."""
provider = metadata_provider or await self._get_default_provider()
return await provider.get_model_by_hash(sha256)
async def relink_metadata(
self,
*,
file_path: str,
metadata: Dict[str, Any],
model_id: int,
model_version_id: Optional[int],
) -> Dict[str, Any]:
"""Relink a local metadata record to a specific CivitAI model version."""
provider = await self._get_default_provider()
civitai_metadata = await provider.get_model_version(model_id, model_version_id)
if not civitai_metadata:
raise ValueError(
f"Model version not found on CivitAI for ID: {model_id}"
+ (f" with version: {model_version_id}" if model_version_id else "")
)
primary_model_file: Optional[Dict[str, Any]] = None
for file_info in civitai_metadata.get("files", []):
if file_info.get("primary", False) and file_info.get("type") == "Model":
primary_model_file = file_info
break
if primary_model_file and primary_model_file.get("hashes", {}).get("SHA256"):
metadata["sha256"] = primary_model_file["hashes"]["SHA256"].lower()
metadata_path = os.path.splitext(file_path)[0] + ".metadata.json"
await self.update_model_metadata(
metadata_path,
metadata,
civitai_metadata,
provider,
)
return metadata
async def save_metadata_updates(
self,
*,
file_path: str,
updates: Dict[str, Any],
metadata_loader: Callable[[str], Awaitable[Dict[str, Any]]],
update_cache: Callable[[str, str, Dict[str, Any]], Awaitable[bool]],
) -> Dict[str, Any]:
"""Apply metadata updates and persist to disk and cache."""
metadata_path = os.path.splitext(file_path)[0] + ".metadata.json"
metadata = await metadata_loader(metadata_path)
for key, value in updates.items():
if isinstance(value, dict) and isinstance(metadata.get(key), dict):
metadata[key].update(value)
else:
metadata[key] = value
await self._metadata_manager.save_metadata(file_path, metadata)
await update_cache(file_path, file_path, metadata)
if "model_name" in updates:
logger.debug("Metadata update touched model_name; cache resort required")
return metadata
async def verify_duplicate_hashes(
self,
*,
file_paths: Iterable[str],
metadata_loader: Callable[[str], Awaitable[Dict[str, Any]]],
hash_calculator: Callable[[str], Awaitable[str]],
update_cache: Callable[[str, str, Dict[str, Any]], Awaitable[bool]],
) -> Dict[str, Any]:
"""Verify a collection of files share the same SHA256 hash."""
file_paths = list(file_paths)
if not file_paths:
raise ValueError("No file paths provided for verification")
results = {
"verified_as_duplicates": True,
"mismatched_files": [],
"new_hash_map": {},
}
expected_hash: Optional[str] = None
first_metadata_path = os.path.splitext(file_paths[0])[0] + ".metadata.json"
first_metadata = await metadata_loader(first_metadata_path)
if first_metadata and "sha256" in first_metadata:
expected_hash = first_metadata["sha256"].lower()
for path in file_paths:
if not os.path.exists(path):
continue
try:
actual_hash = await hash_calculator(path)
metadata_path = os.path.splitext(path)[0] + ".metadata.json"
metadata = await metadata_loader(metadata_path)
stored_hash = metadata.get("sha256", "").lower()
if not expected_hash:
expected_hash = stored_hash
if actual_hash != expected_hash:
results["verified_as_duplicates"] = False
results["mismatched_files"].append(path)
results["new_hash_map"][path] = actual_hash
if actual_hash != stored_hash:
metadata["sha256"] = actual_hash
await self._metadata_manager.save_metadata(path, metadata)
await update_cache(path, path, metadata)
except Exception as exc: # pragma: no cover - defensive path
logger.error("Error verifying hash for %s: %s", path, exc)
results["mismatched_files"].append(path)
results["new_hash_map"][path] = "error_calculating_hash"
results["verified_as_duplicates"] = False
return results
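As a rough sketch of driving verify_duplicate_hashes, the callables below are hypothetical stand-ins for the real loader, hasher, and cache updater; only the keyword-only signature comes from the code above:

import asyncio
import hashlib
import json

async def load_metadata(path: str) -> dict:
    # Hypothetical loader: read the sidecar JSON if present, else empty dict.
    try:
        with open(path, "r", encoding="utf-8") as fh:
            return json.load(fh)
    except FileNotFoundError:
        return {}

async def calc_hash(path: str) -> str:
    # Hash the whole file; a real implementation may stream in chunks.
    with open(path, "rb") as fh:
        return hashlib.sha256(fh.read()).hexdigest()

async def update_cache(old_path: str, new_path: str, metadata: dict) -> bool:
    return True  # stand-in for the scanner's cache updater

# `service` is assumed to be an instance of the service defined above:
# results = await service.verify_duplicate_hashes(
#     file_paths=["a.safetensors", "b.safetensors"],
#     metadata_loader=load_metadata,
#     hash_calculator=calc_hash,
#     update_cache=update_cache,
# )
# results["verified_as_duplicates"] flips to False as soon as any hash diverges.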


@@ -0,0 +1,245 @@
"""Service routines for model lifecycle mutations."""
from __future__ import annotations
import logging
import os
from typing import Awaitable, Callable, Dict, Iterable, List, Optional
from ..services.service_registry import ServiceRegistry
from ..utils.constants import PREVIEW_EXTENSIONS
logger = logging.getLogger(__name__)
async def delete_model_artifacts(target_dir: str, file_name: str) -> List[str]:
"""Delete the primary model artefacts within ``target_dir``."""
patterns = [
f"{file_name}.safetensors",
f"{file_name}.metadata.json",
]
for ext in PREVIEW_EXTENSIONS:
patterns.append(f"{file_name}{ext}")
deleted: List[str] = []
main_file = patterns[0]
main_path = os.path.join(target_dir, main_file).replace(os.sep, "/")
if os.path.exists(main_path):
os.remove(main_path)
deleted.append(main_path)
else:
logger.warning("Model file not found: %s", main_file)
for pattern in patterns[1:]:
path = os.path.join(target_dir, pattern)
if os.path.exists(path):
try:
os.remove(path)
deleted.append(pattern)
except Exception as exc: # pragma: no cover - defensive path
logger.warning("Failed to delete %s: %s", pattern, exc)
return deleted
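A minimal sketch of calling the helper directly; the directory and file stem are hypothetical:

import asyncio

async def main() -> None:
    # Removes my_lora.safetensors, my_lora.metadata.json and any
    # my_lora + preview-extension files found in the directory.
    deleted = await delete_model_artifacts("/models/loras", "my_lora")
    print(deleted)

asyncio.run(main())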
class ModelLifecycleService:
"""Co-ordinate destructive and mutating model operations."""
def __init__(
self,
*,
scanner,
metadata_manager,
metadata_loader: Callable[[str], Awaitable[Dict[str, object]]],
recipe_scanner_factory: Callable[[], Awaitable] | None = None,
) -> None:
self._scanner = scanner
self._metadata_manager = metadata_manager
self._metadata_loader = metadata_loader
self._recipe_scanner_factory = (
recipe_scanner_factory or ServiceRegistry.get_recipe_scanner
)
async def delete_model(self, file_path: str) -> Dict[str, object]:
"""Delete a model file and associated artefacts."""
if not file_path:
raise ValueError("Model path is required")
target_dir = os.path.dirname(file_path)
file_name = os.path.splitext(os.path.basename(file_path))[0]
deleted_files = await delete_model_artifacts(target_dir, file_name)
cache = await self._scanner.get_cached_data()
cache.raw_data = [item for item in cache.raw_data if item["file_path"] != file_path]
await cache.resort()
if hasattr(self._scanner, "_hash_index") and self._scanner._hash_index:
self._scanner._hash_index.remove_by_path(file_path)
return {"success": True, "deleted_files": deleted_files}
async def exclude_model(self, file_path: str) -> Dict[str, object]:
"""Mark a model as excluded and prune cache references."""
if not file_path:
raise ValueError("Model path is required")
metadata_path = os.path.splitext(file_path)[0] + ".metadata.json"
metadata = await self._metadata_loader(metadata_path)
metadata["exclude"] = True
await self._metadata_manager.save_metadata(file_path, metadata)
cache = await self._scanner.get_cached_data()
model_to_remove = next(
(item for item in cache.raw_data if item["file_path"] == file_path),
None,
)
if model_to_remove:
for tag in model_to_remove.get("tags", []):
if tag in getattr(self._scanner, "_tags_count", {}):
self._scanner._tags_count[tag] = max(
0, self._scanner._tags_count[tag] - 1
)
if self._scanner._tags_count[tag] == 0:
del self._scanner._tags_count[tag]
if hasattr(self._scanner, "_hash_index") and self._scanner._hash_index:
self._scanner._hash_index.remove_by_path(file_path)
cache.raw_data = [
item for item in cache.raw_data if item["file_path"] != file_path
]
await cache.resort()
excluded = getattr(self._scanner, "_excluded_models", None)
if isinstance(excluded, list):
excluded.append(file_path)
message = f"Model {os.path.basename(file_path)} excluded"
return {"success": True, "message": message}
async def bulk_delete_models(self, file_paths: Iterable[str]) -> Dict[str, object]:
"""Delete a collection of models via the scanner bulk operation."""
file_paths = list(file_paths)
if not file_paths:
raise ValueError("No file paths provided for deletion")
return await self._scanner.bulk_delete_models(file_paths)
async def rename_model(
self, *, file_path: str, new_file_name: str
) -> Dict[str, object]:
"""Rename a model and its companion artefacts."""
if not file_path or not new_file_name:
raise ValueError("File path and new file name are required")
invalid_chars = {"/", "\\", ":", "*", "?", '"', "<", ">", "|"}
if any(char in new_file_name for char in invalid_chars):
raise ValueError("Invalid characters in file name")
target_dir = os.path.dirname(file_path)
old_file_name = os.path.splitext(os.path.basename(file_path))[0]
new_file_path = os.path.join(target_dir, f"{new_file_name}.safetensors").replace(
os.sep, "/"
)
if os.path.exists(new_file_path):
raise ValueError("A file with this name already exists")
patterns = [
f"{old_file_name}.safetensors",
f"{old_file_name}.metadata.json",
f"{old_file_name}.metadata.json.bak",
]
for ext in PREVIEW_EXTENSIONS:
patterns.append(f"{old_file_name}{ext}")
existing_files: List[tuple[str, str]] = []
for pattern in patterns:
path = os.path.join(target_dir, pattern)
if os.path.exists(path):
existing_files.append((path, pattern))
metadata_path = os.path.join(target_dir, f"{old_file_name}.metadata.json")
metadata: Optional[Dict[str, object]] = None
hash_value: Optional[str] = None
if os.path.exists(metadata_path):
metadata = await self._metadata_loader(metadata_path)
hash_value = metadata.get("sha256") if isinstance(metadata, dict) else None
renamed_files: List[str] = []
new_metadata_path: Optional[str] = None
new_preview: Optional[str] = None
for old_path, pattern in existing_files:
ext = self._get_multipart_ext(pattern)
new_path = os.path.join(target_dir, f"{new_file_name}{ext}").replace(
os.sep, "/"
)
os.rename(old_path, new_path)
renamed_files.append(new_path)
if ext == ".metadata.json":
new_metadata_path = new_path
if metadata and new_metadata_path:
metadata["file_name"] = new_file_name
metadata["file_path"] = new_file_path
if metadata.get("preview_url"):
old_preview = str(metadata["preview_url"])
ext = self._get_multipart_ext(old_preview)
new_preview = os.path.join(target_dir, f"{new_file_name}{ext}").replace(
os.sep, "/"
)
metadata["preview_url"] = new_preview
await self._metadata_manager.save_metadata(new_file_path, metadata)
if metadata:
await self._scanner.update_single_model_cache(
file_path, new_file_path, metadata
)
if hash_value and getattr(self._scanner, "model_type", "") == "lora":
recipe_scanner = await self._recipe_scanner_factory()
if recipe_scanner:
try:
await recipe_scanner.update_lora_filename_by_hash(
hash_value, new_file_name
)
except Exception as exc: # pragma: no cover - defensive logging
logger.error(
"Error updating recipe references for %s: %s",
file_path,
exc,
)
return {
"success": True,
"new_file_path": new_file_path,
"new_preview_path": new_preview,
"renamed_files": renamed_files,
"reload_required": False,
}
@staticmethod
def _get_multipart_ext(filename: str) -> str:
"""Return the extension for files with compound suffixes."""
parts = filename.split(".")
if len(parts) == 3:
return "." + ".".join(parts[-2:])
if len(parts) >= 4:
return "." + ".".join(parts[-3:])
return os.path.splitext(filename)[1]
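_get_multipart_ext keys off the number of dot-separated parts so compound suffixes survive a rename intact; a few worked cases:

# Worked examples of the compound-suffix handling (staticmethod, so no instance needed).
assert ModelLifecycleService._get_multipart_ext("model.safetensors") == ".safetensors"
assert ModelLifecycleService._get_multipart_ext("model.metadata.json") == ".metadata.json"
assert ModelLifecycleService._get_multipart_ext("model.metadata.json.bak") == ".metadata.json.bak"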


@@ -1,11 +1,41 @@
from abc import ABC, abstractmethod
import json
import logging
from typing import Optional, Dict, Tuple, Any
from .downloader import get_downloader
try:
from bs4 import BeautifulSoup
except ImportError as exc:
BeautifulSoup = None # type: ignore[assignment]
_BS4_IMPORT_ERROR = exc
else:
_BS4_IMPORT_ERROR = None
try:
import aiosqlite
except ImportError as exc:
aiosqlite = None # type: ignore[assignment]
_AIOSQLITE_IMPORT_ERROR = exc
else:
_AIOSQLITE_IMPORT_ERROR = None
def _require_beautifulsoup() -> Any:
if BeautifulSoup is None:
raise RuntimeError(
"BeautifulSoup (bs4) is required for CivArchiveModelMetadataProvider. "
"Install it with 'pip install beautifulsoup4'."
) from _BS4_IMPORT_ERROR
return BeautifulSoup
def _require_aiosqlite() -> Any:
if aiosqlite is None:
raise RuntimeError(
"aiosqlite is required for SQLiteModelMetadataProvider. "
"Install it with 'pip install aiosqlite'."
) from _AIOSQLITE_IMPORT_ERROR
return aiosqlite
logger = logging.getLogger(__name__)
class ModelMetadataProvider(ABC):
@@ -78,7 +108,8 @@ class CivArchiveModelMetadataProvider(ModelMetadataProvider):
html_content = await response.text()
# Parse HTML to extract JSON data
soup_parser = _require_beautifulsoup()
soup = soup_parser(html_content, 'html.parser')
script_tag = soup.find('script', {'id': '__NEXT_DATA__', 'type': 'application/json'})
if not script_tag:
@@ -171,10 +202,11 @@ class SQLiteModelMetadataProvider(ModelMetadataProvider):
def __init__(self, db_path: str):
self.db_path = db_path
self._aiosqlite = _require_aiosqlite()
async def get_model_by_hash(self, model_hash: str) -> Tuple[Optional[Dict], Optional[str]]:
"""Find model by hash value from SQLite database"""
async with self._aiosqlite.connect(self.db_path) as db:
# Look up in model_files table to get model_id and version_id
query = """
SELECT model_id, version_id
@@ -182,7 +214,7 @@ class SQLiteModelMetadataProvider(ModelMetadataProvider):
WHERE sha256 = ?
LIMIT 1
"""
db.row_factory = self._aiosqlite.Row
cursor = await db.execute(query, (model_hash.upper(),))
file_row = await cursor.fetchone()
@@ -199,8 +231,8 @@ class SQLiteModelMetadataProvider(ModelMetadataProvider):
async def get_model_versions(self, model_id: str) -> Optional[Dict]:
"""Get all versions of a model from SQLite database"""
async with self._aiosqlite.connect(self.db_path) as db:
db.row_factory = self._aiosqlite.Row
# First check if model exists
model_query = "SELECT * FROM models WHERE id = ?"
@@ -258,8 +290,8 @@ class SQLiteModelMetadataProvider(ModelMetadataProvider):
if not model_id and not version_id:
return None
async with self._aiosqlite.connect(self.db_path) as db:
db.row_factory = self._aiosqlite.Row
# Case 1: Only version_id is provided
if model_id is None and version_id is not None:
@@ -295,8 +327,8 @@ class SQLiteModelMetadataProvider(ModelMetadataProvider):
async def get_model_version_info(self, version_id: str) -> Tuple[Optional[Dict], Optional[str]]:
"""Fetch model version metadata from SQLite database"""
async with self._aiosqlite.connect(self.db_path) as db:
db.row_factory = self._aiosqlite.Row
# Get version details
version_query = "SELECT model_id FROM model_versions WHERE id = ?"
@@ -357,6 +389,45 @@ class SQLiteModelMetadataProvider(ModelMetadataProvider):
# Add any additional fields from version data
result.update(version_data)
# Attach files associated with this version from model_files table
files_query = """
SELECT data
FROM model_files
WHERE version_id = ? AND type = 'Model'
ORDER BY id ASC
"""
cursor = await db.execute(files_query, (version_id,))
file_rows = await cursor.fetchall()
files = []
for file_row in file_rows:
try:
file_data = json.loads(file_row['data'])
except json.JSONDecodeError:
logger.warning(
"Skipping model_files entry with invalid JSON for version_id %s", version_id
)
continue
# Remove 'modelId' and 'modelVersionId' fields if present
file_data.pop('modelId', None)
file_data.pop('modelVersionId', None)
files.append(file_data)
if 'files' in result:
existing_files = result['files']
if isinstance(existing_files, list):
existing_files.extend(files)
result['files'] = existing_files
else:
merged_files = files.copy()
if existing_files:
merged_files.insert(0, existing_files)
result['files'] = merged_files
elif files:
result['files'] = files
else:
result['files'] = []
return result
except json.JSONDecodeError:
return None
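A short sketch of exercising the SQLite-backed provider directly; the database path and hash are hypothetical, and the calls mirror the signatures above:

import asyncio

async def main() -> None:
    provider = SQLiteModelMetadataProvider("/path/to/archive.sqlite")  # hypothetical path
    metadata, error = await provider.get_model_by_hash("0123abcd...")  # SHA256 hex digest
    if error:
        print(f"lookup failed: {error}")
    elif metadata:
        print(metadata.get("name"))

asyncio.run(main())

Constructing the provider raises RuntimeError up front when aiosqlite is missing, rather than failing later inside a query.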

py/services/model_query.py

@@ -0,0 +1,196 @@
from __future__ import annotations
from dataclasses import dataclass
from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, Protocol, Callable
from ..utils.constants import NSFW_LEVELS
from ..utils.utils import fuzzy_match as default_fuzzy_match
class SettingsProvider(Protocol):
"""Protocol describing the SettingsManager contract used by query helpers."""
def get(self, key: str, default: Any = None) -> Any:
...
@dataclass(frozen=True)
class SortParams:
"""Normalized representation of sorting instructions."""
key: str
order: str
@dataclass(frozen=True)
class FilterCriteria:
"""Container for model list filtering options."""
folder: Optional[str] = None
base_models: Optional[Sequence[str]] = None
tags: Optional[Sequence[str]] = None
favorites_only: bool = False
search_options: Optional[Dict[str, Any]] = None
class ModelCacheRepository:
"""Adapter around scanner cache access and sort normalisation."""
def __init__(self, scanner) -> None:
self._scanner = scanner
async def get_cache(self):
"""Return the underlying cache instance from the scanner."""
return await self._scanner.get_cached_data()
async def fetch_sorted(self, params: SortParams) -> List[Dict[str, Any]]:
"""Fetch cached data pre-sorted according to ``params``."""
cache = await self.get_cache()
return await cache.get_sorted_data(params.key, params.order)
@staticmethod
def parse_sort(sort_by: str) -> SortParams:
"""Parse an incoming sort string into key/order primitives."""
if not sort_by:
return SortParams(key="name", order="asc")
if ":" in sort_by:
raw_key, raw_order = sort_by.split(":", 1)
sort_key = raw_key.strip().lower() or "name"
order = raw_order.strip().lower()
else:
sort_key = sort_by.strip().lower() or "name"
order = "asc"
if order not in ("asc", "desc"):
order = "asc"
return SortParams(key=sort_key, order=order)
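parse_sort tolerates missing or malformed order suffixes by falling back to name/asc; a few worked cases:

# Worked examples of the sort-string normalisation.
assert ModelCacheRepository.parse_sort("") == SortParams(key="name", order="asc")
assert ModelCacheRepository.parse_sort("date:desc") == SortParams(key="date", order="desc")
assert ModelCacheRepository.parse_sort("SIZE:bogus") == SortParams(key="size", order="asc")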
class ModelFilterSet:
"""Applies common filtering rules to the model collection."""
def __init__(self, settings: SettingsProvider, nsfw_levels: Optional[Dict[str, int]] = None) -> None:
self._settings = settings
self._nsfw_levels = nsfw_levels or NSFW_LEVELS
def apply(self, data: Iterable[Dict[str, Any]], criteria: FilterCriteria) -> List[Dict[str, Any]]:
"""Return items that satisfy the provided criteria."""
items = list(data)
if self._settings.get("show_only_sfw", False):
threshold = self._nsfw_levels.get("R", 0)
items = [
item for item in items
if not item.get("preview_nsfw_level") or item.get("preview_nsfw_level") < threshold
]
if criteria.favorites_only:
items = [item for item in items if item.get("favorite", False)]
folder = criteria.folder
options = criteria.search_options or {}
recursive = bool(options.get("recursive", True))
if folder is not None:
if recursive:
if folder:
folder_with_sep = f"{folder}/"
items = [
item for item in items
if item.get("folder") == folder or item.get("folder", "").startswith(folder_with_sep)
]
else:
items = [item for item in items if item.get("folder") == folder]
base_models = criteria.base_models or []
if base_models:
base_model_set = set(base_models)
items = [item for item in items if item.get("base_model") in base_model_set]
tags = criteria.tags or []
if tags:
tag_set = set(tags)
items = [
item for item in items
if any(tag in tag_set for tag in item.get("tags", []))
]
return items
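A minimal sketch of the filter set with a stub settings provider; the sample items are hypothetical:

class _StubSettings:
    def get(self, key, default=None):
        return {"show_only_sfw": False}.get(key, default)

items = [
    {"folder": "characters", "base_model": "SDXL", "tags": ["style"], "favorite": True},
    {"folder": "characters/anime", "base_model": "SD1.5", "tags": [], "favorite": False},
]
filters = ModelFilterSet(_StubSettings())
criteria = FilterCriteria(folder="characters", base_models=["SDXL"], favorites_only=True)
assert filters.apply(items, criteria) == [items[0]]  # recursive folder match, favorites only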
class SearchStrategy:
"""Encapsulates text and fuzzy matching behaviour for model queries."""
DEFAULT_OPTIONS: Dict[str, Any] = {
"filename": True,
"modelname": True,
"tags": False,
"recursive": True,
"creator": False,
}
def __init__(self, fuzzy_matcher: Optional[Callable[[str, str], bool]] = None) -> None:
self._fuzzy_match = fuzzy_matcher or default_fuzzy_match
def normalize_options(self, options: Optional[Dict[str, Any]]) -> Dict[str, Any]:
"""Merge provided options with defaults without mutating input."""
normalized = dict(self.DEFAULT_OPTIONS)
if options:
normalized.update(options)
return normalized
def apply(
self,
data: Iterable[Dict[str, Any]],
search_term: str,
options: Dict[str, Any],
fuzzy: bool = False,
) -> List[Dict[str, Any]]:
"""Return items matching the search term using the configured strategy."""
if not search_term:
return list(data)
search_lower = search_term.lower()
results: List[Dict[str, Any]] = []
for item in data:
if options.get("filename", True):
candidate = item.get("file_name", "")
if self._matches(candidate, search_term, search_lower, fuzzy):
results.append(item)
continue
if options.get("modelname", True):
candidate = item.get("model_name", "")
if self._matches(candidate, search_term, search_lower, fuzzy):
results.append(item)
continue
if options.get("tags", False):
tags = item.get("tags", []) or []
if any(self._matches(tag, search_term, search_lower, fuzzy) for tag in tags):
results.append(item)
continue
if options.get("creator", False):
creator_username = ""
civitai = item.get("civitai")
if isinstance(civitai, dict):
creator = civitai.get("creator")
if isinstance(creator, dict):
creator_username = creator.get("username", "")
if creator_username and self._matches(creator_username, search_term, search_lower, fuzzy):
results.append(item)
continue
return results
def _matches(self, candidate: str, search_term: str, search_lower: str, fuzzy: bool) -> bool:
if not candidate:
return False
candidate_lower = candidate.lower()
if fuzzy:
return self._fuzzy_match(candidate, search_term)
return search_lower in candidate_lower
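And a quick sketch of the search strategy using the defaults plus tag matching; the sample rows are hypothetical:

strategy = SearchStrategy()
options = strategy.normalize_options({"tags": True})  # defaults stay untouched
data = [
    {"file_name": "noir_style_v2", "model_name": "Noir Style", "tags": ["noir"]},
    {"file_name": "landscape_pack", "model_name": "Landscapes", "tags": ["scenery"]},
]
# Plain substring match over file name, then model name, then tags:
assert strategy.apply(data, "noir", options) == [data[0]]
# fuzzy=True routes every comparison through the injected matcher instead:
fuzzy_hits = strategy.apply(data, "nior", options, fuzzy=True)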


@@ -4,7 +4,8 @@ import logging
import asyncio
import time
import shutil
from dataclasses import dataclass
from typing import Any, Awaitable, Callable, Dict, List, Mapping, Optional, Set, Type, Union
from ..utils.models import BaseModelMetadata
from ..config import config
@@ -13,11 +14,23 @@ from ..utils.metadata_manager import MetadataManager
from .model_cache import ModelCache
from .model_hash_index import ModelHashIndex
from ..utils.constants import PREVIEW_EXTENSIONS
from .model_lifecycle_service import delete_model_artifacts
from .service_registry import ServiceRegistry
from .websocket_manager import ws_manager
from .persistent_model_cache import get_persistent_cache
logger = logging.getLogger(__name__)
@dataclass
class CacheBuildResult:
"""Represents the outcome of scanning model files for cache building."""
raw_data: List[Dict]
hash_index: ModelHashIndex
tags_count: Dict[str, int]
excluded_models: List[str]
class ModelScanner:
"""Base service for scanning and managing model files"""
@@ -67,16 +80,123 @@ class ModelScanner:
self._tags_count = {} # Dictionary to store tag counts
self._is_initializing = False # Flag to track initialization state
self._excluded_models = [] # List to track excluded models
self._persistent_cache = get_persistent_cache()
self._initialized = True
# Register this service
asyncio.create_task(self._register_service())
def on_library_changed(self) -> None:
"""Reset caches when the active library changes."""
self._persistent_cache = get_persistent_cache()
self._cache = None
self._hash_index = ModelHashIndex()
self._tags_count = {}
self._excluded_models = []
self._is_initializing = False
try:
loop = asyncio.get_running_loop()
except RuntimeError:
loop = None
if loop and not loop.is_closed():
loop.create_task(self.initialize_in_background())
async def _register_service(self):
"""Register this instance with the ServiceRegistry"""
service_name = f"{self.model_type}_scanner"
await ServiceRegistry.register_service(service_name, self)
def _slim_civitai_payload(self, civitai: Optional[Mapping[str, Any]]) -> Optional[Dict[str, Any]]:
"""Return a lightweight civitai payload containing only frequently used keys."""
if not isinstance(civitai, Mapping) or not civitai:
return None
slim: Dict[str, Any] = {}
for key in ('id', 'modelId', 'name'):
value = civitai.get(key)
if value not in (None, '', []):
slim[key] = value
trained_words = civitai.get('trainedWords')
if trained_words:
slim['trainedWords'] = list(trained_words) if isinstance(trained_words, list) else trained_words
return slim or None
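_slim_civitai_payload keeps just the identity keys plus trainedWords, dropping heavyweight fields before they reach the cache; a worked illustration with a hypothetical payload:

# Hypothetical CivitAI payload trimmed to the cache-friendly subset.
full = {"id": 42, "modelId": 7, "name": "v1.0", "trainedWords": ["cue"], "description": "..."}
# scanner._slim_civitai_payload(full) -> {"id": 42, "modelId": 7, "name": "v1.0", "trainedWords": ["cue"]}
# Falsy or empty payloads collapse to None so nothing is stored:
# scanner._slim_civitai_payload({}) -> None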
def _build_cache_entry(
self,
source: Union[BaseModelMetadata, Mapping[str, Any]],
*,
folder: Optional[str] = None,
file_path_override: Optional[str] = None
) -> Dict[str, Any]:
"""Project metadata into the lightweight cache representation."""
is_mapping = isinstance(source, Mapping)
def get_value(key: str, default: Any = None) -> Any:
if is_mapping:
return source.get(key, default)
return getattr(source, key, default)
file_path = file_path_override or get_value('file_path', '') or ''
normalized_path = file_path.replace('\\', '/')
folder_value = folder if folder is not None else get_value('folder', '') or ''
normalized_folder = folder_value.replace('\\', '/')
tags_value = get_value('tags') or []
if isinstance(tags_value, list):
tags_list = list(tags_value)
elif isinstance(tags_value, (set, tuple)):
tags_list = list(tags_value)
else:
tags_list = []
preview_url = get_value('preview_url', '') or ''
if isinstance(preview_url, str):
preview_url = preview_url.replace('\\', '/')
else:
preview_url = ''
civitai_slim = self._slim_civitai_payload(get_value('civitai'))
usage_tips = get_value('usage_tips', '') or ''
if not isinstance(usage_tips, str):
usage_tips = str(usage_tips)
notes = get_value('notes', '') or ''
if not isinstance(notes, str):
notes = str(notes)
entry: Dict[str, Any] = {
'file_path': normalized_path,
'file_name': get_value('file_name', '') or '',
'model_name': get_value('model_name', '') or '',
'folder': normalized_folder,
'size': int(get_value('size', 0) or 0),
'modified': float(get_value('modified', 0.0) or 0.0),
'sha256': (get_value('sha256', '') or '').lower(),
'base_model': get_value('base_model', '') or '',
'preview_url': preview_url,
'preview_nsfw_level': int(get_value('preview_nsfw_level', 0) or 0),
'from_civitai': bool(get_value('from_civitai', True)),
'favorite': bool(get_value('favorite', False)),
'notes': notes,
'usage_tips': usage_tips,
'exclude': bool(get_value('exclude', False)),
'db_checked': bool(get_value('db_checked', False)),
'last_checked_at': float(get_value('last_checked_at', 0.0) or 0.0),
'tags': tags_list,
'civitai': civitai_slim,
'civitai_deleted': bool(get_value('civitai_deleted', False)),
}
model_type = get_value('model_type', None)
if model_type:
entry['model_type'] = model_type
return entry
async def initialize_in_background(self) -> None:
"""Initialize cache in background using thread pool"""
try:
@@ -91,7 +211,12 @@ class ModelScanner:
self._is_initializing = True
# Determine the page type based on model type
page_type_map = {
'lora': 'loras',
'checkpoint': 'checkpoints',
'embedding': 'embeddings'
}
page_type = page_type_map.get(self.model_type, self.model_type)
# First, try to load from cache
await ws_manager.broadcast_init_progress({
@@ -101,8 +226,25 @@ class ModelScanner:
'scanner_type': self.model_type,
'pageType': page_type
})
cache_loaded = await self._load_persisted_cache(page_type)
if cache_loaded:
await asyncio.sleep(0) # Yield control so the UI can process the cache hydration update
await ws_manager.broadcast_init_progress({
'stage': 'finalizing',
'progress': 100,
'status': 'complete',
'details': f"Loaded {len(self._cache.raw_data)} cached {self.model_type} files from disk.",
'scanner_type': self.model_type,
'pageType': page_type
})
logger.info(
f"{self.model_type.capitalize()} cache hydrated from persisted snapshot with {len(self._cache.raw_data)} models"
)
return
# Persistent load failed; fall back to a full scan
await ws_manager.broadcast_init_progress({
'stage': 'scan_folders',
'progress': 0,
@@ -129,13 +271,17 @@ class ModelScanner:
start_time = time.time()
# Use thread pool to execute CPU-intensive operations with progress reporting
scan_result: Optional[CacheBuildResult] = await loop.run_in_executor(
None, # Use default thread pool
self._initialize_cache_sync, # Run synchronous version in thread
total_files, # Pass the total file count for progress reporting
page_type # Pass the page type for progress reporting
)
if scan_result:
await self._apply_scan_result(scan_result)
await self._save_persistent_cache(scan_result)
# Send final progress update
await ws_manager.broadcast_init_progress({
'stage': 'finalizing',
@@ -164,6 +310,105 @@ class ModelScanner:
# Always clear the initializing flag when done
self._is_initializing = False
async def _load_persisted_cache(self, page_type: str) -> bool:
"""Attempt to hydrate the in-memory cache from the SQLite snapshot."""
if not getattr(self, '_persistent_cache', None):
return False
loop = asyncio.get_event_loop()
try:
persisted = await loop.run_in_executor(
None,
self._persistent_cache.load_cache,
self.model_type
)
except FileNotFoundError:
return False
except Exception as exc:
logger.debug("%s Scanner: Could not load persisted cache: %s", self.model_type.capitalize(), exc)
return False
if not persisted or not persisted.raw_data:
return False
hash_index = ModelHashIndex()
for sha_value, path in persisted.hash_rows:
if sha_value and path:
hash_index.add_entry(sha_value.lower(), path)
tags_count: Dict[str, int] = {}
for item in persisted.raw_data:
for tag in item.get('tags') or []:
tags_count[tag] = tags_count.get(tag, 0) + 1
scan_result = CacheBuildResult(
raw_data=list(persisted.raw_data),
hash_index=hash_index,
tags_count=tags_count,
excluded_models=list(persisted.excluded_models)
)
await self._apply_scan_result(scan_result)
await ws_manager.broadcast_init_progress({
'stage': 'loading_cache',
'progress': 1,
'details': f"Loaded cached {self.model_type} data from disk",
'scanner_type': self.model_type,
'pageType': page_type
})
return True
async def _save_persistent_cache(self, scan_result: CacheBuildResult) -> None:
if not scan_result or not getattr(self, '_persistent_cache', None):
return
hash_snapshot = self._build_hash_index_snapshot(scan_result.hash_index)
loop = asyncio.get_event_loop()
try:
await loop.run_in_executor(
None,
self._persistent_cache.save_cache,
self.model_type,
list(scan_result.raw_data),
hash_snapshot,
list(scan_result.excluded_models)
)
except Exception as exc:
logger.warning("%s Scanner: Failed to persist cache: %s", self.model_type.capitalize(), exc)
def _build_hash_index_snapshot(self, hash_index: Optional[ModelHashIndex]) -> Dict[str, List[str]]:
snapshot: Dict[str, List[str]] = {}
if not hash_index:
return snapshot
for sha_value, path in getattr(hash_index, '_hash_to_path', {}).items():
if not sha_value or not path:
continue
bucket = snapshot.setdefault(sha_value.lower(), [])
if path not in bucket:
bucket.append(path)
for sha_value, paths in getattr(hash_index, '_duplicate_hashes', {}).items():
if not sha_value:
continue
bucket = snapshot.setdefault(sha_value.lower(), [])
for path in paths:
if path and path not in bucket:
bucket.append(path)
return snapshot
async def _persist_current_cache(self) -> None:
if self._cache is None or not getattr(self, '_persistent_cache', None):
return
snapshot = CacheBuildResult(
raw_data=list(self._cache.raw_data),
hash_index=self._hash_index,
tags_count=dict(self._tags_count),
excluded_models=list(self._excluded_models)
)
await self._save_persistent_cache(snapshot)
def _count_model_files(self) -> int:
"""Count all model files with supported extensions in all roots
@@ -203,124 +448,53 @@ class ModelScanner:
return total_files
def _initialize_cache_sync(self, total_files=0, page_type='loras'):
def _initialize_cache_sync(self, total_files: int = 0, page_type: str = 'loras') -> Optional[CacheBuildResult]:
"""Synchronous version of cache initialization for thread pool execution"""
loop = asyncio.new_event_loop()
try:
asyncio.set_event_loop(loop)
last_progress_time = time.time()
last_progress_percent = 0
async def progress_callback(processed_files: int, expected_total: int) -> None:
nonlocal last_progress_time, last_progress_percent
if expected_total <= 0:
return
current_time = time.time()
progress_percent = min(99, int(1 + (processed_files / expected_total) * 98))
if progress_percent <= last_progress_percent:
return
if current_time - last_progress_time <= 0.5 and processed_files != expected_total:
return
last_progress_percent = progress_percent
last_progress_time = current_time
await ws_manager.broadcast_init_progress({
'stage': 'process_models',
'progress': progress_percent,
'details': f"Processing {self.model_type} files: {processed_files}/{expected_total}",
'scanner_type': self.model_type,
'pageType': page_type
})
return loop.run_until_complete(
self._gather_model_data(
total_files=total_files,
progress_callback=progress_callback
)
)
except Exception as e:
logger.error(f"Error in thread-based {self.model_type} cache initialization: {e}")
return None
finally:
# Clean up the event loop
asyncio.set_event_loop(None)
loop.close()
async def get_cached_data(self, force_refresh: bool = False, rebuild_cache: bool = False) -> ModelCache:
@@ -352,45 +526,16 @@ class ModelScanner:
self._is_initializing = True # Set flag
try:
start_time = time.time()
scan_result = await self._gather_model_data()
await self._apply_scan_result(scan_result)
await self._save_persistent_cache(scan_result)
logger.info(
f"{self.model_type.capitalize()} Scanner: Cache initialization completed in {time.time() - start_time:.2f} seconds, "
f"found {len(scan_result.raw_data)} models"
)
except Exception as e:
logger.error(f"{self.model_type.capitalize()} Scanner: Error initializing cache: {e}")
# Ensure cache is at least an empty structure on error
@@ -475,8 +620,12 @@ class ModelScanner:
try:
# Find the appropriate root path for this file
root_path = None
model_roots = self.get_model_roots()
for potential_root in model_roots:
# Normalize both paths for comparison
normalized_path = os.path.normpath(path)
normalized_root = os.path.normpath(potential_root)
if normalized_path.startswith(normalized_root):
root_path = potential_root
break
@@ -534,70 +683,17 @@ class ModelScanner:
# Update folders list
all_folders = set(item.get('folder', '') for item in self._cache.raw_data)
self._cache.folders = sorted(list(all_folders), key=lambda x: x.lower())
# Resort cache
await self._cache.resort()
await self._persist_current_cache()
logger.info(f"{self.model_type.capitalize()} Scanner: Cache reconciliation completed in {time.time() - start_time:.2f} seconds. Added {total_added}, removed {total_removed} models.")
except Exception as e:
logger.error(f"{self.model_type.capitalize()} Scanner: Error reconciling cache: {e}", exc_info=True)
finally:
self._is_initializing = False # Unset flag
def is_initializing(self) -> bool:
"""Check if the scanner is currently initializing"""
@@ -623,8 +719,18 @@ class ModelScanner:
"""Hook for subclasses: adjust metadata during scanning"""
return metadata
async def _process_model_file(
self,
file_path: str,
root_path: str,
*,
hash_index: Optional[ModelHashIndex] = None,
excluded_models: Optional[List[str]] = None
) -> Dict:
"""Process a single model file and return its metadata"""
hash_index = hash_index or self._hash_index
excluded_models = excluded_models if excluded_models is not None else self._excluded_models
metadata, should_skip = await MetadataManager.load_metadata(file_path, self.model_class)
if should_skip:
@@ -684,30 +790,134 @@ class ModelScanner:
# Hook: allow subclasses to adjust metadata
metadata = self.adjust_metadata(metadata, file_path, root_path)
rel_path = os.path.relpath(file_path, root_path)
folder = os.path.dirname(rel_path)
normalized_folder = folder.replace(os.path.sep, '/')
model_data = self._build_cache_entry(metadata, folder=normalized_folder)
# Skip excluded models
if model_data.get('exclude', False):
excluded_models.append(model_data['file_path'])
return None
# Check for duplicate filename before adding to hash index
# filename = os.path.splitext(os.path.basename(file_path))[0]
# existing_hash = hash_index.get_hash_by_filename(filename)
# if existing_hash and existing_hash != model_data.get('sha256', '').lower():
# existing_path = hash_index.get_path(existing_hash)
# if existing_path and existing_path != file_path:
# logger.warning(f"Duplicate filename detected: '{filename}' - files: '{existing_path}' and '{file_path}'")
return model_data
async def _apply_scan_result(self, scan_result: CacheBuildResult) -> None:
"""Apply scan results to the cache and associated indexes."""
if scan_result is None:
return
self._hash_index = scan_result.hash_index
self._tags_count = dict(scan_result.tags_count)
self._excluded_models = list(scan_result.excluded_models)
if self._cache is None:
self._cache = ModelCache(
raw_data=list(scan_result.raw_data),
folders=[]
)
else:
self._cache.raw_data = list(scan_result.raw_data)
await self._cache.resort()
async def _gather_model_data(
self,
*,
total_files: int = 0,
progress_callback: Optional[Callable[[int, int], Awaitable[None]]] = None
) -> CacheBuildResult:
"""Collect metadata for all model files."""
raw_data: List[Dict] = []
hash_index = ModelHashIndex()
tags_count: Dict[str, int] = {}
excluded_models: List[str] = []
processed_files = 0
async def handle_progress() -> None:
if progress_callback is None:
return
try:
await progress_callback(processed_files, total_files)
except Exception as exc: # pragma: no cover - defensive logging
logger.error(f"Error reporting progress for {self.model_type}: {exc}")
async def scan_recursive(current_path: str, root_path: str, visited_paths: Set[str]) -> None:
nonlocal processed_files
try:
real_path = os.path.realpath(current_path)
if real_path in visited_paths:
return
visited_paths.add(real_path)
with os.scandir(current_path) as iterator:
entries = list(iterator)
for entry in entries:
try:
if entry.is_file(follow_symlinks=True):
ext = os.path.splitext(entry.name)[1].lower()
if ext not in self.file_extensions:
continue
file_path = entry.path.replace(os.sep, "/")
result = await self._process_model_file(
file_path,
root_path,
hash_index=hash_index,
excluded_models=excluded_models
)
processed_files += 1
if result:
raw_data.append(result)
sha_value = result.get('sha256')
model_path = result.get('file_path')
if sha_value and model_path:
hash_index.add_entry(sha_value.lower(), model_path)
for tag in result.get('tags') or []:
tags_count[tag] = tags_count.get(tag, 0) + 1
await handle_progress()
await asyncio.sleep(0)
elif entry.is_dir(follow_symlinks=True):
await scan_recursive(entry.path, root_path, visited_paths)
except Exception as entry_error:
logger.error(f"Error processing entry {entry.path}: {entry_error}")
except Exception as scan_error:
logger.error(f"Error scanning {current_path}: {scan_error}")
for model_root in self.get_model_roots():
if not os.path.exists(model_root):
continue
await scan_recursive(model_root, model_root, set())
return CacheBuildResult(
raw_data=raw_data,
hash_index=hash_index,
tags_count=tags_count,
excluded_models=excluded_models
)
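Every cache build path now funnels through the same gather/apply/persist trio; a condensed sketch of that flow, assuming an already-constructed scanner and a hypothetical progress printer:

import asyncio

async def rebuild(scanner) -> None:
    # Hypothetical reporter matching the (processed, total) callback contract.
    async def report(processed: int, total: int) -> None:
        print(f"{processed}/{total} files processed")

    total = scanner._count_model_files()
    result = await scanner._gather_model_data(total_files=total, progress_callback=report)
    await scanner._apply_scan_result(result)      # swap in raw data, hash index, tag counts
    await scanner._save_persistent_cache(result)  # snapshot to SQLite for the next startup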
async def add_model_to_cache(self, metadata_dict: Dict, folder: str = '') -> bool:
"""Add a model to the cache
Args:
metadata_dict: The model metadata dictionary
folder: The relative folder path for the model
@@ -735,6 +945,7 @@ class ModelScanner:
# Update the hash index
self._hash_index.add_entry(metadata_dict['sha256'], metadata_dict['file_path'])
await self._persist_current_cache()
return True
except Exception as e:
logger.error(f"Error adding model to cache: {e}")
@@ -863,7 +1074,7 @@ class ModelScanner:
async def update_single_model_cache(self, original_path: str, new_path: str, metadata: Dict) -> bool:
"""Update cache after a model has been moved or modified"""
cache = await self.get_cached_data()
existing_item = next((item for item in cache.raw_data if item['file_path'] == original_path), None)
if existing_item and 'tags' in existing_item:
for tag in existing_item.get('tags', []):
@@ -875,35 +1086,42 @@ class ModelScanner:
self._hash_index.remove_by_path(original_path)
cache.raw_data = [
item for item in cache.raw_data
if item['file_path'] != original_path
]
cache_modified = bool(existing_item) or bool(metadata)
if metadata:
normalized_new_path = new_path.replace(os.sep, '/')
if original_path == new_path and existing_item:
folder_value = existing_item.get('folder', self._calculate_folder(new_path))
else:
folder_value = self._calculate_folder(new_path)
cache_entry = self._build_cache_entry(
metadata,
folder=folder_value,
file_path_override=normalized_new_path,
)
cache.raw_data.append(cache_entry)
sha_value = cache_entry.get('sha256')
if sha_value:
self._hash_index.add_entry(sha_value.lower(), normalized_new_path)
all_folders = set(item['folder'] for item in cache.raw_data)
cache.folders = sorted(list(all_folders), key=lambda x: x.lower())
for tag in cache_entry.get('tags', []):
self._tags_count[tag] = self._tags_count.get(tag, 0) + 1
await cache.resort()
if cache_modified:
await self._persist_current_cache()
return True
def has_hash(self, sha256: str) -> bool:
@@ -1005,7 +1223,10 @@ class ModelScanner:
if self._cache is None:
return False
updated = await self._cache.update_preview_url(file_path, preview_url, preview_nsfw_level)
if updated:
await self._persist_current_cache()
return updated
async def bulk_delete_models(self, file_paths: List[str]) -> Dict:
"""Delete multiple models and update cache in a batch operation
@@ -1040,10 +1261,8 @@ class ModelScanner:
target_dir = os.path.dirname(file_path)
file_name = os.path.splitext(os.path.basename(file_path))[0]
# Delete all associated files for the model
deleted_files = await delete_model_artifacts(
target_dir,
file_name
)
@@ -1136,7 +1355,9 @@ class ModelScanner:
# Resort cache
await self._cache.resort()
await self._persist_current_cache()
return True
except Exception as e:


@@ -0,0 +1,357 @@
import json
import logging
import os
import re
import sqlite3
import threading
from dataclasses import dataclass
from typing import Dict, List, Optional, Sequence, Tuple
from ..utils.settings_paths import get_settings_dir
logger = logging.getLogger(__name__)
@dataclass
class PersistedCacheData:
"""Lightweight structure returned by the persistent cache."""
raw_data: List[Dict]
hash_rows: List[Tuple[str, str]]
excluded_models: List[str]
class PersistentModelCache:
"""Persist core model metadata and hash index data in SQLite."""
_DEFAULT_FILENAME = "model_cache.sqlite"
_instances: Dict[str, "PersistentModelCache"] = {}
_instance_lock = threading.Lock()
def __init__(self, library_name: str = "default", db_path: Optional[str] = None) -> None:
self._library_name = library_name or "default"
self._db_path = db_path or self._resolve_default_path(self._library_name)
self._db_lock = threading.Lock()
self._schema_initialized = False
try:
directory = os.path.dirname(self._db_path)
if directory:
os.makedirs(directory, exist_ok=True)
except Exception as exc: # pragma: no cover - defensive guard
logger.warning("Could not create cache directory %s: %s", directory, exc)
if self.is_enabled():
self._initialize_schema()
@classmethod
def get_default(cls, library_name: Optional[str] = None) -> "PersistentModelCache":
name = (library_name or "default")
with cls._instance_lock:
if name not in cls._instances:
cls._instances[name] = cls(name)
return cls._instances[name]
def is_enabled(self) -> bool:
return os.environ.get("LORA_MANAGER_DISABLE_PERSISTENT_CACHE", "0") != "1"
def load_cache(self, model_type: str) -> Optional[PersistedCacheData]:
if not self.is_enabled():
return None
if not self._schema_initialized:
self._initialize_schema()
if not self._schema_initialized:
return None
try:
with self._db_lock:
conn = self._connect(readonly=True)
try:
rows = conn.execute(
"SELECT file_path, file_name, model_name, folder, size, modified, sha256, base_model,"
" preview_url, preview_nsfw_level, from_civitai, favorite, notes, usage_tips,"
" civitai_id, civitai_model_id, civitai_name, trained_words, exclude, db_checked,"
" last_checked_at"
" FROM models WHERE model_type = ?",
(model_type,),
).fetchall()
if not rows:
return None
tags = self._load_tags(conn, model_type)
hash_rows = conn.execute(
"SELECT sha256, file_path FROM hash_index WHERE model_type = ?",
(model_type,),
).fetchall()
excluded = conn.execute(
"SELECT file_path FROM excluded_models WHERE model_type = ?",
(model_type,),
).fetchall()
finally:
conn.close()
except Exception as exc:
logger.warning("Failed to load persisted cache for %s: %s", model_type, exc)
return None
raw_data: List[Dict] = []
for row in rows:
file_path: str = row["file_path"]
trained_words = []
if row["trained_words"]:
try:
trained_words = json.loads(row["trained_words"])
except json.JSONDecodeError:
trained_words = []
civitai: Optional[Dict] = None
if any(row[col] is not None for col in ("civitai_id", "civitai_model_id", "civitai_name")):
civitai = {}
if row["civitai_id"] is not None:
civitai["id"] = row["civitai_id"]
if row["civitai_model_id"] is not None:
civitai["modelId"] = row["civitai_model_id"]
if row["civitai_name"]:
civitai["name"] = row["civitai_name"]
if trained_words:
civitai["trainedWords"] = trained_words
item = {
"file_path": file_path,
"file_name": row["file_name"],
"model_name": row["model_name"],
"folder": row["folder"] or "",
"size": row["size"] or 0,
"modified": row["modified"] or 0.0,
"sha256": row["sha256"] or "",
"base_model": row["base_model"] or "",
"preview_url": row["preview_url"] or "",
"preview_nsfw_level": row["preview_nsfw_level"] or 0,
"from_civitai": bool(row["from_civitai"]),
"favorite": bool(row["favorite"]),
"notes": row["notes"] or "",
"usage_tips": row["usage_tips"] or "",
"exclude": bool(row["exclude"]),
"db_checked": bool(row["db_checked"]),
"last_checked_at": row["last_checked_at"] or 0.0,
"tags": tags.get(file_path, []),
"civitai": civitai,
}
raw_data.append(item)
hash_pairs = [(entry["sha256"].lower(), entry["file_path"]) for entry in hash_rows if entry["sha256"]]
if not hash_pairs:
# Fall back to hashes stored on the model rows
for item in raw_data:
sha_value = item.get("sha256")
if sha_value:
hash_pairs.append((sha_value.lower(), item["file_path"]))
excluded_paths = [row["file_path"] for row in excluded]
return PersistedCacheData(raw_data=raw_data, hash_rows=hash_pairs, excluded_models=excluded_paths)
def save_cache(self, model_type: str, raw_data: Sequence[Dict], hash_index: Dict[str, List[str]], excluded_models: Sequence[str]) -> None:
if not self.is_enabled():
return
if not self._schema_initialized:
self._initialize_schema()
if not self._schema_initialized:
return
try:
with self._db_lock:
conn = self._connect()
try:
conn.execute("PRAGMA foreign_keys = ON")
conn.execute("DELETE FROM models WHERE model_type = ?", (model_type,))
conn.execute("DELETE FROM model_tags WHERE model_type = ?", (model_type,))
conn.execute("DELETE FROM hash_index WHERE model_type = ?", (model_type,))
conn.execute("DELETE FROM excluded_models WHERE model_type = ?", (model_type,))
model_rows = [self._prepare_model_row(model_type, item) for item in raw_data]
conn.executemany(self._insert_model_sql(), model_rows)
tag_rows = []
for item in raw_data:
file_path = item.get("file_path")
if not file_path:
continue
for tag in item.get("tags") or []:
tag_rows.append((model_type, file_path, tag))
if tag_rows:
conn.executemany(
"INSERT INTO model_tags (model_type, file_path, tag) VALUES (?, ?, ?)",
tag_rows,
)
hash_rows: List[Tuple[str, str, str]] = []
for sha_value, paths in hash_index.items():
for path in paths:
if not sha_value or not path:
continue
hash_rows.append((model_type, sha_value.lower(), path))
if hash_rows:
conn.executemany(
"INSERT OR IGNORE INTO hash_index (model_type, sha256, file_path) VALUES (?, ?, ?)",
hash_rows,
)
excluded_rows = [(model_type, path) for path in excluded_models]
if excluded_rows:
conn.executemany(
"INSERT OR IGNORE INTO excluded_models (model_type, file_path) VALUES (?, ?)",
excluded_rows,
)
conn.commit()
finally:
conn.close()
except Exception as exc:
logger.warning("Failed to persist cache for %s: %s", model_type, exc)
# Internal helpers -------------------------------------------------
def _resolve_default_path(self, library_name: str) -> str:
override = os.environ.get("LORA_MANAGER_CACHE_DB")
if override:
return override
try:
settings_dir = get_settings_dir(create=True)
except Exception as exc: # pragma: no cover - defensive guard
logger.warning("Falling back to project directory for cache: %s", exc)
settings_dir = os.path.dirname(os.path.dirname(self._db_path)) if hasattr(self, "_db_path") else os.getcwd()
safe_name = re.sub(r"[^A-Za-z0-9_.-]", "_", library_name or "default")
if safe_name.lower() in ("default", ""):
legacy_path = os.path.join(settings_dir, self._DEFAULT_FILENAME)
if os.path.exists(legacy_path):
return legacy_path
return os.path.join(settings_dir, "model_cache", f"{safe_name}.sqlite")
def _initialize_schema(self) -> None:
with self._db_lock:
if self._schema_initialized:
return
try:
with self._connect() as conn:
conn.execute("PRAGMA journal_mode=WAL")
conn.execute("PRAGMA foreign_keys = ON")
conn.executescript(
"""
CREATE TABLE IF NOT EXISTS models (
model_type TEXT NOT NULL,
file_path TEXT NOT NULL,
file_name TEXT,
model_name TEXT,
folder TEXT,
size INTEGER,
modified REAL,
sha256 TEXT,
base_model TEXT,
preview_url TEXT,
preview_nsfw_level INTEGER,
from_civitai INTEGER,
favorite INTEGER,
notes TEXT,
usage_tips TEXT,
civitai_id INTEGER,
civitai_model_id INTEGER,
civitai_name TEXT,
trained_words TEXT,
exclude INTEGER,
db_checked INTEGER,
last_checked_at REAL,
PRIMARY KEY (model_type, file_path)
);
CREATE TABLE IF NOT EXISTS model_tags (
model_type TEXT NOT NULL,
file_path TEXT NOT NULL,
tag TEXT NOT NULL,
PRIMARY KEY (model_type, file_path, tag)
);
CREATE TABLE IF NOT EXISTS hash_index (
model_type TEXT NOT NULL,
sha256 TEXT NOT NULL,
file_path TEXT NOT NULL,
PRIMARY KEY (model_type, sha256, file_path)
);
CREATE TABLE IF NOT EXISTS excluded_models (
model_type TEXT NOT NULL,
file_path TEXT NOT NULL,
PRIMARY KEY (model_type, file_path)
);
"""
)
conn.commit()
self._schema_initialized = True
except Exception as exc: # pragma: no cover - defensive guard
logger.warning("Failed to initialize persistent cache schema: %s", exc)
def _connect(self, readonly: bool = False) -> sqlite3.Connection:
uri = False
path = self._db_path
if readonly:
if not os.path.exists(path):
raise FileNotFoundError(path)
path = f"file:{path}?mode=ro"
uri = True
conn = sqlite3.connect(path, check_same_thread=False, uri=uri, detect_types=sqlite3.PARSE_DECLTYPES)
conn.row_factory = sqlite3.Row
return conn
def _prepare_model_row(self, model_type: str, item: Dict) -> Tuple:
civitai = item.get("civitai") or {}
trained_words = civitai.get("trainedWords")
if isinstance(trained_words, str):
trained_words_json = trained_words
elif trained_words is None:
trained_words_json = None
else:
trained_words_json = json.dumps(trained_words)
return (
model_type,
item.get("file_path"),
item.get("file_name"),
item.get("model_name"),
item.get("folder"),
int(item.get("size") or 0),
float(item.get("modified") or 0.0),
(item.get("sha256") or "").lower() or None,
item.get("base_model"),
item.get("preview_url"),
int(item.get("preview_nsfw_level") or 0),
1 if item.get("from_civitai", True) else 0,
1 if item.get("favorite") else 0,
item.get("notes"),
item.get("usage_tips"),
civitai.get("id"),
civitai.get("modelId"),
civitai.get("name"),
trained_words_json,
1 if item.get("exclude") else 0,
1 if item.get("db_checked") else 0,
float(item.get("last_checked_at") or 0.0),
)
def _insert_model_sql(self) -> str:
return (
"INSERT INTO models (model_type, file_path, file_name, model_name, folder, size, modified, sha256,"
" base_model, preview_url, preview_nsfw_level, from_civitai, favorite, notes, usage_tips,"
" civitai_id, civitai_model_id, civitai_name, trained_words, exclude, db_checked, last_checked_at)"
" VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
)
def _load_tags(self, conn: sqlite3.Connection, model_type: str) -> Dict[str, List[str]]:
tag_rows = conn.execute(
"SELECT file_path, tag FROM model_tags WHERE model_type = ?",
(model_type,),
).fetchall()
result: Dict[str, List[str]] = {}
for row in tag_rows:
result.setdefault(row["file_path"], []).append(row["tag"])
return result
def get_persistent_cache() -> PersistentModelCache:
from .settings_manager import settings as settings_service # Local import to avoid cycles
library_name = settings_service.get_active_library_name()
return PersistentModelCache.get_default(library_name)
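A minimal round-trip sketch of the persistence API above. The model payload, hash, and paths are hypothetical; LORA_MANAGER_CACHE_DB is the override honored by _resolve_default_path, and save_cache silently no-ops when the cache is disabled or the schema cannot be created:

import os

os.environ["LORA_MANAGER_CACHE_DB"] = "/tmp/lora_manager_cache.sqlite"  # hypothetical override path

cache = get_persistent_cache()  # resolves the active library's database file
cache.save_cache(
    "lora",
    raw_data=[{"file_path": "/models/loras/foo.safetensors", "file_name": "foo", "tags": ["style"]}],
    hash_index={"deadbeef": ["/models/loras/foo.safetensors"]},  # placeholder sha256 -> paths
    excluded_models=[],
)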

View File

@@ -0,0 +1,168 @@
"""Service for processing preview assets for models."""
from __future__ import annotations
import logging
import os
from typing import Awaitable, Callable, Dict, Optional, Sequence
from ..utils.constants import CARD_PREVIEW_WIDTH, PREVIEW_EXTENSIONS
logger = logging.getLogger(__name__)
class PreviewAssetService:
"""Manage fetching and persisting preview assets."""
def __init__(
self,
*,
metadata_manager,
downloader_factory: Callable[[], Awaitable],
exif_utils,
) -> None:
self._metadata_manager = metadata_manager
self._downloader_factory = downloader_factory
self._exif_utils = exif_utils
async def ensure_preview_for_metadata(
self,
metadata_path: str,
local_metadata: Dict[str, object],
images: Sequence[Dict[str, object]] | None,
) -> None:
"""Ensure preview assets exist for the supplied metadata entry."""
if local_metadata.get("preview_url") and os.path.exists(
str(local_metadata["preview_url"])
):
return
if not images:
return
first_preview = images[0]
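        # Metadata files end in ".metadata.json", so strip two extensions to get the base name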
base_name = os.path.splitext(os.path.splitext(os.path.basename(metadata_path))[0])[0]
preview_dir = os.path.dirname(metadata_path)
is_video = first_preview.get("type") == "video"
if is_video:
extension = ".mp4"
preview_path = os.path.join(preview_dir, base_name + extension)
downloader = await self._downloader_factory()
success, result = await downloader.download_file(
first_preview["url"], preview_path, use_auth=False
)
if success:
local_metadata["preview_url"] = preview_path.replace(os.sep, "/")
local_metadata["preview_nsfw_level"] = first_preview.get("nsfwLevel", 0)
else:
extension = ".webp"
preview_path = os.path.join(preview_dir, base_name + extension)
downloader = await self._downloader_factory()
success, content, _headers = await downloader.download_to_memory(
first_preview["url"], use_auth=False
)
if not success:
return
try:
optimized_data, _ = self._exif_utils.optimize_image(
image_data=content,
target_width=CARD_PREVIEW_WIDTH,
format="webp",
quality=85,
preserve_metadata=False,
)
with open(preview_path, "wb") as handle:
handle.write(optimized_data)
except Exception as exc: # pragma: no cover - defensive path
logger.error("Error optimizing preview image: %s", exc)
try:
with open(preview_path, "wb") as handle:
handle.write(content)
except Exception as save_exc:
logger.error("Error saving preview image: %s", save_exc)
return
local_metadata["preview_url"] = preview_path.replace(os.sep, "/")
local_metadata["preview_nsfw_level"] = first_preview.get("nsfwLevel", 0)
async def replace_preview(
self,
*,
model_path: str,
preview_data: bytes,
content_type: str,
original_filename: Optional[str],
nsfw_level: int,
update_preview_in_cache: Callable[[str, str, int], Awaitable[bool]],
metadata_loader: Callable[[str], Awaitable[Dict[str, object]]],
) -> Dict[str, object]:
"""Replace an existing preview asset for a model."""
base_name = os.path.splitext(os.path.basename(model_path))[0]
folder = os.path.dirname(model_path)
extension, optimized_data = await self._convert_preview(
preview_data, content_type, original_filename
)
for ext in PREVIEW_EXTENSIONS:
existing_preview = os.path.join(folder, base_name + ext)
if os.path.exists(existing_preview):
try:
os.remove(existing_preview)
except Exception as exc: # pragma: no cover - defensive path
logger.warning(
"Failed to delete existing preview %s: %s", existing_preview, exc
)
preview_path = os.path.join(folder, base_name + extension).replace(os.sep, "/")
with open(preview_path, "wb") as handle:
handle.write(optimized_data)
metadata_path = os.path.splitext(model_path)[0] + ".metadata.json"
metadata = await metadata_loader(metadata_path)
metadata["preview_url"] = preview_path
metadata["preview_nsfw_level"] = nsfw_level
await self._metadata_manager.save_metadata(model_path, metadata)
await update_preview_in_cache(model_path, preview_path, nsfw_level)
return {"preview_path": preview_path, "preview_nsfw_level": nsfw_level}
async def _convert_preview(
self, data: bytes, content_type: str, original_filename: Optional[str]
) -> tuple[str, bytes]:
"""Convert preview bytes to the persisted representation."""
if content_type.startswith("video/"):
extension = self._resolve_video_extension(content_type, original_filename)
return extension, data
original_ext = (original_filename or "").lower()
if original_ext.endswith(".gif") or content_type.lower() == "image/gif":
return ".gif", data
optimized_data, _ = self._exif_utils.optimize_image(
image_data=data,
target_width=CARD_PREVIEW_WIDTH,
format="webp",
quality=85,
preserve_metadata=False,
)
return ".webp", optimized_data
def _resolve_video_extension(self, content_type: str, original_filename: Optional[str]) -> str:
"""Infer the best extension for a video preview."""
if original_filename:
extension = os.path.splitext(original_filename)[1].lower()
if extension in {".mp4", ".webm", ".mov", ".avi"}:
return extension
if "webm" in content_type:
return ".webm"
return ".mp4"

View File

@@ -1,5 +1,5 @@
import asyncio
from typing import List, Dict
from typing import Iterable, List, Dict, Optional
from dataclasses import dataclass
from operator import itemgetter
from natsort import natsorted
@@ -10,77 +10,115 @@ class RecipeCache:
raw_data: List[Dict]
sorted_by_name: List[Dict]
sorted_by_date: List[Dict]
def __post_init__(self):
self._lock = asyncio.Lock()
    async def resort(self, name_only: bool = False):
        """Resort all cached data views"""
        async with self._lock:
            self._resort_locked(name_only=name_only)
    async def update_recipe_metadata(self, recipe_id: str, metadata: Dict, *, resort: bool = True) -> bool:
        """Update metadata for a specific recipe in all cached data
        Args:
            recipe_id: The ID of the recipe to update
            metadata: The new metadata
        Returns:
            bool: True if the update was successful, False if the recipe wasn't found
        """
        async with self._lock:
            for item in self.raw_data:
                if str(item.get('id')) == str(recipe_id):
                    item.update(metadata)
                    if resort:
                        self._resort_locked()
                    return True
            return False  # Recipe not found
    async def add_recipe(self, recipe_data: Dict, *, resort: bool = False) -> None:
        """Add a new recipe to the cache."""
        async with self._lock:
            self.raw_data.append(recipe_data)
            if resort:
                self._resort_locked()
    async def remove_recipe(self, recipe_id: str, *, resort: bool = False) -> Optional[Dict]:
        """Remove a recipe from the cache by ID.
        Args:
            recipe_id: The ID of the recipe to remove
        Returns:
            The removed recipe data if found, otherwise ``None``.
        """
        async with self._lock:
            for index, recipe in enumerate(self.raw_data):
                if str(recipe.get('id')) == str(recipe_id):
                    removed = self.raw_data.pop(index)
                    if resort:
                        self._resort_locked()
                    return removed
            return None
async def bulk_remove(self, recipe_ids: Iterable[str], *, resort: bool = False) -> List[Dict]:
"""Remove multiple recipes from the cache."""
id_set = {str(recipe_id) for recipe_id in recipe_ids}
if not id_set:
return []
async with self._lock:
removed = [item for item in self.raw_data if str(item.get('id')) in id_set]
if not removed:
return []
self.raw_data = [item for item in self.raw_data if str(item.get('id')) not in id_set]
if resort:
self._resort_locked()
return removed
async def replace_recipe(self, recipe_id: str, new_data: Dict, *, resort: bool = False) -> bool:
"""Replace cached data for a recipe."""
async with self._lock:
for index, recipe in enumerate(self.raw_data):
if str(recipe.get('id')) == str(recipe_id):
self.raw_data[index] = new_data
if resort:
self._resort_locked()
return True
return False
async def get_recipe(self, recipe_id: str) -> Optional[Dict]:
"""Return a shallow copy of a cached recipe."""
async with self._lock:
for recipe in self.raw_data:
if str(recipe.get('id')) == str(recipe_id):
return dict(recipe)
return None
async def snapshot(self) -> List[Dict]:
"""Return a copy of all cached recipes."""
async with self._lock:
return [dict(item) for item in self.raw_data]
def _resort_locked(self, *, name_only: bool = False) -> None:
"""Sort cached views. Caller must hold ``_lock``."""
self.sorted_by_name = natsorted(
self.raw_data,
key=lambda x: x.get('title', '').lower()
)
if not name_only:
self.sorted_by_date = sorted(
self.raw_data,
key=itemgetter('created_date', 'file_path'),
reverse=True
)
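The resort=False defaults let callers batch mutations without paying for a sort per item, then rebuild both views once; the scanner below relies on this via _schedule_resort. A minimal sketch with hypothetical recipe dicts:

import asyncio

async def demo():
    cache = RecipeCache(raw_data=[], sorted_by_name=[], sorted_by_date=[])
    for i in range(3):
        await cache.add_recipe(
            {"id": str(i), "title": f"Recipe {i}", "created_date": float(i), "file_path": f"/tmp/{i}.webp"},
            resort=False,  # defer sorting while batching
        )
    await cache.resort()  # one sort for the whole batch
    assert [item["title"] for item in cache.sorted_by_name] == ["Recipe 0", "Recipe 1", "Recipe 2"]

asyncio.run(demo())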

View File

@@ -3,13 +3,14 @@ import logging
import asyncio
import json
import time
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple
from ..config import config
from .recipe_cache import RecipeCache
from .service_registry import ServiceRegistry
from .lora_scanner import LoraScanner
from .metadata_service import get_default_metadata_provider
from .recipes.errors import RecipeNotFoundError
from ..utils.utils import calculate_recipe_fingerprint, fuzzy_match
from natsort import natsorted
import sys
@@ -46,9 +47,36 @@ class RecipeScanner:
self._initialization_lock = asyncio.Lock()
self._initialization_task: Optional[asyncio.Task] = None
self._is_initializing = False
self._mutation_lock = asyncio.Lock()
self._resort_tasks: Set[asyncio.Task] = set()
if lora_scanner:
self._lora_scanner = lora_scanner
self._initialized = True
def on_library_changed(self) -> None:
"""Reset cached state when the active library changes."""
# Cancel any in-flight initialization or resorting work so the next
# access rebuilds the cache for the new library.
if self._initialization_task and not self._initialization_task.done():
self._initialization_task.cancel()
for task in list(self._resort_tasks):
if not task.done():
task.cancel()
self._resort_tasks.clear()
self._cache = None
self._initialization_task = None
self._is_initializing = False
try:
loop = asyncio.get_running_loop()
except RuntimeError:
loop = None
if loop and not loop.is_closed():
loop.create_task(self.initialize_in_background())
async def _get_civitai_client(self):
"""Lazily initialize CivitaiClient from registry"""
@@ -191,6 +219,22 @@ class RecipeScanner:
# Clean up the event loop
loop.close()
def _schedule_resort(self, *, name_only: bool = False) -> None:
"""Schedule a background resort of the recipe cache."""
if not self._cache:
return
async def _resort_wrapper() -> None:
try:
await self._cache.resort(name_only=name_only)
except Exception as exc: # pragma: no cover - defensive logging
logger.error("Recipe Scanner: error resorting cache: %s", exc, exc_info=True)
task = asyncio.create_task(_resort_wrapper())
self._resort_tasks.add(task)
task.add_done_callback(lambda finished: self._resort_tasks.discard(finished))
@property
def recipes_dir(self) -> str:
"""Get path to recipes directory"""
@@ -255,7 +299,45 @@ class RecipeScanner:
# Return the cache (may be empty or partially initialized)
return self._cache or RecipeCache(raw_data=[], sorted_by_name=[], sorted_by_date=[])
async def refresh_cache(self, force: bool = False) -> RecipeCache:
"""Public helper to refresh or return the recipe cache."""
return await self.get_cached_data(force_refresh=force)
async def add_recipe(self, recipe_data: Dict[str, Any]) -> None:
"""Add a recipe to the in-memory cache."""
if not recipe_data:
return
cache = await self.get_cached_data()
await cache.add_recipe(recipe_data, resort=False)
self._schedule_resort()
async def remove_recipe(self, recipe_id: str) -> bool:
"""Remove a recipe from the cache by ID."""
if not recipe_id:
return False
cache = await self.get_cached_data()
removed = await cache.remove_recipe(recipe_id, resort=False)
if removed is None:
return False
self._schedule_resort()
return True
async def bulk_remove(self, recipe_ids: Iterable[str]) -> int:
"""Remove multiple recipes from the cache."""
cache = await self.get_cached_data()
removed = await cache.bulk_remove(recipe_ids, resort=False)
if removed:
self._schedule_resort()
return len(removed)
async def scan_all_recipes(self) -> List[Dict]:
"""Scan all recipe JSON files and return metadata"""
recipes = []
@@ -326,7 +408,6 @@ class RecipeScanner:
# Calculate and update fingerprint if missing
if 'loras' in recipe_data and 'fingerprint' not in recipe_data:
fingerprint = calculate_recipe_fingerprint(recipe_data['loras'])
recipe_data['fingerprint'] = fingerprint
@@ -368,27 +449,29 @@ class RecipeScanner:
# If has modelVersionId but no hash, look in lora cache first, then fetch from Civitai
if 'modelVersionId' in lora and not lora.get('hash'):
model_version_id = lora['modelVersionId']
                # Try to find in lora cache first
                hash_from_cache = await self._find_hash_in_lora_cache(model_version_id)
                if hash_from_cache:
                    lora['hash'] = hash_from_cache
                    metadata_updated = True
                else:
                    # If not in cache, fetch from Civitai
                    result = await self._get_hash_from_civitai(model_version_id)
                    if isinstance(result, tuple):
                        hash_from_civitai, is_deleted = result
                        if hash_from_civitai:
                            lora['hash'] = hash_from_civitai
                            metadata_updated = True
                        elif is_deleted:
                            # Mark the lora as deleted if it was not found on Civitai
                            lora['isDeleted'] = True
                            logger.warning(f"Marked lora with modelVersionId {model_version_id} as deleted")
                            metadata_updated = True
                    else:
                        logger.debug(f"Could not get hash for modelVersionId {model_version_id}")
# If has hash but no file_name, look up in lora library
if 'hash' in lora and (not lora.get('file_name') or not lora['file_name']):
@@ -497,9 +580,36 @@ class RecipeScanner:
logger.error(f"Error getting base model for lora: {e}")
return None
def _enrich_lora_entry(self, lora: Dict[str, Any]) -> Dict[str, Any]:
"""Populate convenience fields for a LoRA entry."""
if not lora or not self._lora_scanner:
return lora
hash_value = (lora.get('hash') or '').lower()
if not hash_value:
return lora
try:
lora['inLibrary'] = self._lora_scanner.has_hash(hash_value)
lora['preview_url'] = self._lora_scanner.get_preview_url_by_hash(hash_value)
lora['localPath'] = self._lora_scanner.get_path_by_hash(hash_value)
except Exception as exc: # pragma: no cover - defensive logging
logger.debug("Error enriching lora entry %s: %s", hash_value, exc)
return lora
async def get_local_lora(self, name: str) -> Optional[Dict[str, Any]]:
"""Lookup a local LoRA model by name."""
if not self._lora_scanner or not name:
return None
return await self._lora_scanner.get_model_info_by_name(name)
async def get_paginated_data(self, page: int, page_size: int, sort_by: str = 'date', search: str = None, filters: dict = None, search_options: dict = None, lora_hash: str = None, bypass_filters: bool = True):
"""Get paginated and filtered recipe data
Args:
page: Current page number (1-based)
page_size: Number of items per page
@@ -598,16 +708,12 @@ class RecipeScanner:
# Get paginated items
paginated_items = filtered_data[start_idx:end_idx]
            # Add inLibrary information for each lora
            for item in paginated_items:
                if 'loras' in item:
                    item['loras'] = [self._enrich_lora_entry(dict(lora)) for lora in item['loras']]
result = {
'items': paginated_items,
'total': total_items,
@@ -653,33 +759,25 @@ class RecipeScanner:
# Add lora metadata
if 'loras' in formatted_recipe:
                formatted_recipe['loras'] = [self._enrich_lora_entry(dict(lora)) for lora in formatted_recipe['loras']]
return formatted_recipe
def _format_file_url(self, file_path: str) -> str:
"""Format file path as URL for serving in web UI"""
if not file_path:
return '/loras_static/images/no-preview.png'
try:
normalized_path = os.path.normpath(file_path)
static_url = config.get_preview_static_url(normalized_path)
if static_url:
return static_url
except Exception as e:
logger.error(f"Error formatting file URL: {e}")
return '/loras_static/images/no-preview.png'
return '/loras_static/images/no-preview.png'
def _format_timestamp(self, timestamp: float) -> str:
"""Format timestamp for display"""
@@ -717,26 +815,159 @@ class RecipeScanner:
# Save updated recipe
with open(recipe_json_path, 'w', encoding='utf-8') as f:
json.dump(recipe_data, f, indent=4, ensure_ascii=False)
# Update the cache if it exists
if self._cache is not None:
await self._cache.update_recipe_metadata(recipe_id, metadata, resort=False)
self._schedule_resort()
# If the recipe has an image, update its EXIF metadata
from ..utils.exif_utils import ExifUtils
image_path = recipe_data.get('file_path')
if image_path and os.path.exists(image_path):
ExifUtils.append_recipe_metadata(image_path, recipe_data)
return True
except Exception as e:
import logging
logging.getLogger(__name__).error(f"Error updating recipe metadata: {e}", exc_info=True)
return False
async def update_lora_entry(
self,
recipe_id: str,
lora_index: int,
*,
target_name: str,
target_lora: Optional[Dict[str, Any]] = None,
) -> Tuple[Dict[str, Any], Dict[str, Any]]:
"""Update a specific LoRA entry within a recipe.
Returns the updated recipe data and the refreshed LoRA metadata.
"""
if target_name is None:
raise ValueError("target_name must be provided")
recipe_json_path = os.path.join(self.recipes_dir, f"{recipe_id}.recipe.json")
if not os.path.exists(recipe_json_path):
raise RecipeNotFoundError("Recipe not found")
async with self._mutation_lock:
with open(recipe_json_path, 'r', encoding='utf-8') as file_obj:
recipe_data = json.load(file_obj)
loras = recipe_data.get('loras', [])
if lora_index >= len(loras):
raise RecipeNotFoundError("LoRA index out of range in recipe")
lora_entry = loras[lora_index]
lora_entry['isDeleted'] = False
lora_entry['exclude'] = False
lora_entry['file_name'] = target_name
if target_lora is not None:
sha_value = target_lora.get('sha256') or target_lora.get('sha')
if sha_value:
lora_entry['hash'] = sha_value.lower()
civitai_info = target_lora.get('civitai') or {}
if civitai_info:
lora_entry['modelName'] = civitai_info.get('model', {}).get('name', '')
lora_entry['modelVersionName'] = civitai_info.get('name', '')
lora_entry['modelVersionId'] = civitai_info.get('id')
recipe_data['fingerprint'] = calculate_recipe_fingerprint(recipe_data.get('loras', []))
recipe_data['modified'] = time.time()
with open(recipe_json_path, 'w', encoding='utf-8') as file_obj:
json.dump(recipe_data, file_obj, indent=4, ensure_ascii=False)
cache = await self.get_cached_data()
replaced = await cache.replace_recipe(recipe_id, recipe_data, resort=False)
if not replaced:
await cache.add_recipe(recipe_data, resort=False)
self._schedule_resort()
updated_lora = dict(lora_entry)
if target_lora is not None:
preview_url = target_lora.get('preview_url')
if preview_url:
updated_lora['preview_url'] = config.get_preview_static_url(preview_url)
if target_lora.get('file_path'):
updated_lora['localPath'] = target_lora['file_path']
updated_lora = self._enrich_lora_entry(updated_lora)
return recipe_data, updated_lora
async def get_recipes_for_lora(self, lora_hash: str) -> List[Dict[str, Any]]:
"""Return recipes that reference a given LoRA hash."""
if not lora_hash:
return []
normalized_hash = lora_hash.lower()
cache = await self.get_cached_data()
matching_recipes: List[Dict[str, Any]] = []
for recipe in cache.raw_data:
loras = recipe.get('loras', [])
if any((entry.get('hash') or '').lower() == normalized_hash for entry in loras):
recipe_copy = {**recipe}
recipe_copy['loras'] = [self._enrich_lora_entry(dict(entry)) for entry in loras]
recipe_copy['file_url'] = self._format_file_url(recipe.get('file_path'))
matching_recipes.append(recipe_copy)
return matching_recipes
async def get_recipe_syntax_tokens(self, recipe_id: str) -> List[str]:
"""Build LoRA syntax tokens for a recipe."""
cache = await self.get_cached_data()
recipe = await cache.get_recipe(recipe_id)
if recipe is None:
raise RecipeNotFoundError("Recipe not found")
loras = recipe.get('loras', [])
if not loras:
return []
lora_cache = None
if self._lora_scanner is not None:
lora_cache = await self._lora_scanner.get_cached_data()
syntax_parts: List[str] = []
for lora in loras:
if lora.get('isDeleted', False):
continue
file_name = None
hash_value = (lora.get('hash') or '').lower()
if hash_value and self._lora_scanner is not None and hasattr(self._lora_scanner, '_hash_index'):
file_path = self._lora_scanner._hash_index.get_path(hash_value)
if file_path:
file_name = os.path.splitext(os.path.basename(file_path))[0]
if not file_name and lora.get('modelVersionId') and lora_cache is not None:
for cached_lora in getattr(lora_cache, 'raw_data', []):
civitai_info = cached_lora.get('civitai')
if civitai_info and civitai_info.get('id') == lora.get('modelVersionId'):
cached_path = cached_lora.get('path') or cached_lora.get('file_path')
if cached_path:
file_name = os.path.splitext(os.path.basename(cached_path))[0]
break
if not file_name:
file_name = lora.get('file_name', 'unknown-lora')
strength = lora.get('strength', 1.0)
syntax_parts.append(f"<lora:{file_name}:{strength}>")
return syntax_parts
async def update_lora_filename_by_hash(self, hash_value: str, new_file_name: str) -> Tuple[int, int]:
"""Update file_name in all recipes that contain a LoRA with the specified hash.
Args:
hash_value: The SHA256 hash value of the LoRA
new_file_name: The new file_name to set

View File

@@ -0,0 +1,23 @@
"""Recipe service layer implementations."""
from .analysis_service import RecipeAnalysisService
from .persistence_service import RecipePersistenceService
from .sharing_service import RecipeSharingService
from .errors import (
RecipeServiceError,
RecipeValidationError,
RecipeNotFoundError,
RecipeDownloadError,
RecipeConflictError,
)
__all__ = [
"RecipeAnalysisService",
"RecipePersistenceService",
"RecipeSharingService",
"RecipeServiceError",
"RecipeValidationError",
"RecipeNotFoundError",
"RecipeDownloadError",
"RecipeConflictError",
]

View File

@@ -0,0 +1,289 @@
"""Services responsible for recipe metadata analysis."""
from __future__ import annotations
import base64
import io
import os
import re
import tempfile
from dataclasses import dataclass
from typing import Any, Callable, Optional
import numpy as np
from PIL import Image
from ...utils.utils import calculate_recipe_fingerprint
from .errors import (
RecipeDownloadError,
RecipeNotFoundError,
RecipeServiceError,
RecipeValidationError,
)
@dataclass(frozen=True)
class AnalysisResult:
"""Return payload from analysis operations."""
payload: dict[str, Any]
status: int = 200
class RecipeAnalysisService:
"""Extract recipe metadata from various image sources."""
def __init__(
self,
*,
exif_utils,
recipe_parser_factory,
downloader_factory: Callable[[], Any],
metadata_collector: Optional[Callable[[], Any]] = None,
metadata_processor_cls: Optional[type] = None,
metadata_registry_cls: Optional[type] = None,
standalone_mode: bool = False,
logger,
) -> None:
self._exif_utils = exif_utils
self._recipe_parser_factory = recipe_parser_factory
self._downloader_factory = downloader_factory
self._metadata_collector = metadata_collector
self._metadata_processor_cls = metadata_processor_cls
self._metadata_registry_cls = metadata_registry_cls
self._standalone_mode = standalone_mode
self._logger = logger
async def analyze_uploaded_image(
self,
*,
image_bytes: bytes | None,
recipe_scanner,
) -> AnalysisResult:
"""Analyze an uploaded image payload."""
if not image_bytes:
raise RecipeValidationError("No image data provided")
temp_path = self._write_temp_file(image_bytes)
try:
metadata = self._exif_utils.extract_image_metadata(temp_path)
if not metadata:
return AnalysisResult({"error": "No metadata found in this image", "loras": []})
return await self._parse_metadata(
metadata,
recipe_scanner=recipe_scanner,
image_path=None,
include_image_base64=False,
)
finally:
self._safe_cleanup(temp_path)
async def analyze_remote_image(
self,
*,
url: str | None,
recipe_scanner,
civitai_client,
) -> AnalysisResult:
"""Analyze an image accessible via URL, including Civitai integration."""
if not url:
raise RecipeValidationError("No URL provided")
if civitai_client is None:
raise RecipeServiceError("Civitai client unavailable")
temp_path = self._create_temp_path()
metadata: Optional[dict[str, Any]] = None
try:
civitai_match = re.match(r"https://civitai\.com/images/(\d+)", url)
if civitai_match:
image_info = await civitai_client.get_image_info(civitai_match.group(1))
if not image_info:
raise RecipeDownloadError("Failed to fetch image information from Civitai")
image_url = image_info.get("url")
if not image_url:
raise RecipeDownloadError("No image URL found in Civitai response")
await self._download_image(image_url, temp_path)
metadata = image_info.get("meta") if "meta" in image_info else None
else:
await self._download_image(url, temp_path)
if metadata is None:
metadata = self._exif_utils.extract_image_metadata(temp_path)
if not metadata:
return self._metadata_not_found_response(temp_path)
return await self._parse_metadata(
metadata,
recipe_scanner=recipe_scanner,
image_path=temp_path,
include_image_base64=True,
)
finally:
self._safe_cleanup(temp_path)
async def analyze_local_image(
self,
*,
file_path: str | None,
recipe_scanner,
) -> AnalysisResult:
"""Analyze a file already present on disk."""
if not file_path:
raise RecipeValidationError("No file path provided")
normalized_path = os.path.normpath(file_path.strip('"').strip("'"))
if not os.path.isfile(normalized_path):
raise RecipeNotFoundError("File not found")
metadata = self._exif_utils.extract_image_metadata(normalized_path)
if not metadata:
return self._metadata_not_found_response(normalized_path)
return await self._parse_metadata(
metadata,
recipe_scanner=recipe_scanner,
image_path=normalized_path,
include_image_base64=True,
)
async def analyze_widget_metadata(self, *, recipe_scanner) -> AnalysisResult:
"""Analyse the most recent generation metadata for widget saves."""
if self._metadata_collector is None or self._metadata_processor_cls is None:
raise RecipeValidationError("Metadata collection not available")
raw_metadata = self._metadata_collector()
metadata_dict = self._metadata_processor_cls.to_dict(raw_metadata)
if not metadata_dict:
raise RecipeValidationError("No generation metadata found")
latest_image = None
if not self._standalone_mode and self._metadata_registry_cls is not None:
metadata_registry = self._metadata_registry_cls()
latest_image = metadata_registry.get_first_decoded_image()
if latest_image is None:
raise RecipeValidationError(
"No recent images found to use for recipe. Try generating an image first."
)
image_bytes = self._convert_tensor_to_png_bytes(latest_image)
if image_bytes is None:
raise RecipeValidationError("Cannot handle this data shape from metadata registry")
return AnalysisResult(
{
"metadata": metadata_dict,
"image_bytes": image_bytes,
}
)
# Internal helpers -------------------------------------------------
async def _parse_metadata(
self,
metadata: dict[str, Any],
*,
recipe_scanner,
image_path: Optional[str],
include_image_base64: bool,
) -> AnalysisResult:
parser = self._recipe_parser_factory.create_parser(metadata)
if parser is None:
payload = {"error": "No parser found for this image", "loras": []}
if include_image_base64 and image_path:
payload["image_base64"] = self._encode_file(image_path)
return AnalysisResult(payload)
result = await parser.parse_metadata(metadata, recipe_scanner=recipe_scanner)
if include_image_base64 and image_path:
result["image_base64"] = self._encode_file(image_path)
if "error" in result and not result.get("loras"):
return AnalysisResult(result)
fingerprint = calculate_recipe_fingerprint(result.get("loras", []))
result["fingerprint"] = fingerprint
matching_recipes: list[str] = []
if fingerprint:
matching_recipes = await recipe_scanner.find_recipes_by_fingerprint(fingerprint)
result["matching_recipes"] = matching_recipes
return AnalysisResult(result)
async def _download_image(self, url: str, temp_path: str) -> None:
downloader = await self._downloader_factory()
success, result = await downloader.download_file(url, temp_path, use_auth=False)
if not success:
raise RecipeDownloadError(f"Failed to download image from URL: {result}")
def _metadata_not_found_response(self, path: str) -> AnalysisResult:
payload: dict[str, Any] = {"error": "No metadata found in this image", "loras": []}
if os.path.exists(path):
payload["image_base64"] = self._encode_file(path)
return AnalysisResult(payload)
def _write_temp_file(self, data: bytes) -> str:
with tempfile.NamedTemporaryFile(delete=False, suffix=".jpg") as temp_file:
temp_file.write(data)
return temp_file.name
def _create_temp_path(self) -> str:
with tempfile.NamedTemporaryFile(delete=False, suffix=".jpg") as temp_file:
return temp_file.name
def _safe_cleanup(self, path: Optional[str]) -> None:
if path and os.path.exists(path):
try:
os.unlink(path)
except Exception as exc: # pragma: no cover - defensive logging
self._logger.error("Error deleting temporary file: %s", exc)
def _encode_file(self, path: str) -> str:
with open(path, "rb") as image_file:
return base64.b64encode(image_file.read()).decode("utf-8")
def _convert_tensor_to_png_bytes(self, latest_image: Any) -> Optional[bytes]:
try:
if isinstance(latest_image, tuple):
tensor_image = latest_image[0] if latest_image else None
if tensor_image is None:
return None
else:
tensor_image = latest_image
if hasattr(tensor_image, "shape"):
self._logger.debug(
"Tensor shape: %s, dtype: %s", tensor_image.shape, getattr(tensor_image, "dtype", None)
)
import torch # type: ignore[import-not-found]
if isinstance(tensor_image, torch.Tensor):
image_np = tensor_image.cpu().numpy()
else:
image_np = np.array(tensor_image)
while len(image_np.shape) > 3:
image_np = image_np[0]
if image_np.dtype in (np.float32, np.float64) and image_np.max() <= 1.0:
image_np = (image_np * 255).astype(np.uint8)
if len(image_np.shape) == 3 and image_np.shape[2] == 3:
pil_image = Image.fromarray(image_np)
img_byte_arr = io.BytesIO()
pil_image.save(img_byte_arr, format="PNG")
return img_byte_arr.getvalue()
except Exception as exc: # pragma: no cover - defensive logging path
self._logger.error("Error processing image data: %s", exc, exc_info=True)
return None
return None
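The float branch of _convert_tensor_to_png_bytes reduces a batched (1, H, W, 3) array to an 8-bit RGB PNG; a standalone sketch of those normalization steps with random data:

import io
import numpy as np
from PIL import Image

image_np = np.random.rand(1, 64, 64, 3).astype(np.float32)  # hypothetical (batch, H, W, C) output
while len(image_np.shape) > 3:
    image_np = image_np[0]                    # drop leading batch dimensions
image_np = (image_np * 255).astype(np.uint8)  # scale [0, 1] floats to 8-bit
buffer = io.BytesIO()
Image.fromarray(image_np).save(buffer, format="PNG")
png_bytes = buffer.getvalue()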

View File

@@ -0,0 +1,22 @@
"""Shared exceptions for recipe services."""
from __future__ import annotations
class RecipeServiceError(Exception):
"""Base exception for recipe service failures."""
class RecipeValidationError(RecipeServiceError):
"""Raised when a request payload fails validation."""
class RecipeNotFoundError(RecipeServiceError):
"""Raised when a recipe resource cannot be located."""
class RecipeDownloadError(RecipeServiceError):
"""Raised when remote recipe assets cannot be downloaded."""
class RecipeConflictError(RecipeServiceError):
"""Raised when a conflicting recipe state is detected."""

View File

@@ -0,0 +1,407 @@
"""Services encapsulating recipe persistence workflows."""
from __future__ import annotations
import base64
import json
import os
import re
import time
import uuid
from dataclasses import dataclass
from typing import Any, Dict, Iterable, Optional
from ...config import config
from ...utils.utils import calculate_recipe_fingerprint
from .errors import RecipeNotFoundError, RecipeValidationError
@dataclass(frozen=True)
class PersistenceResult:
"""Return payload from persistence operations."""
payload: dict[str, Any]
status: int = 200
class RecipePersistenceService:
"""Coordinate recipe persistence tasks across storage and caches."""
def __init__(
self,
*,
exif_utils,
card_preview_width: int,
logger,
) -> None:
self._exif_utils = exif_utils
self._card_preview_width = card_preview_width
self._logger = logger
async def save_recipe(
self,
*,
recipe_scanner,
image_bytes: bytes | None,
image_base64: str | None,
name: str | None,
tags: Iterable[str],
metadata: Optional[dict[str, Any]],
) -> PersistenceResult:
"""Persist a user uploaded recipe."""
missing_fields = []
if not name:
missing_fields.append("name")
if metadata is None:
missing_fields.append("metadata")
if missing_fields:
raise RecipeValidationError(
f"Missing required fields: {', '.join(missing_fields)}"
)
resolved_image_bytes = self._resolve_image_bytes(image_bytes, image_base64)
recipes_dir = recipe_scanner.recipes_dir
os.makedirs(recipes_dir, exist_ok=True)
recipe_id = str(uuid.uuid4())
optimized_image, extension = self._exif_utils.optimize_image(
image_data=resolved_image_bytes,
target_width=self._card_preview_width,
format="webp",
quality=85,
preserve_metadata=True,
)
image_filename = f"{recipe_id}{extension}"
image_path = os.path.join(recipes_dir, image_filename)
with open(image_path, "wb") as file_obj:
file_obj.write(optimized_image)
current_time = time.time()
loras_data = [self._normalise_lora_entry(lora) for lora in metadata.get("loras", [])]
gen_params = metadata.get("gen_params", {})
if not gen_params and "raw_metadata" in metadata:
raw_metadata = metadata.get("raw_metadata", {})
gen_params = {
"prompt": raw_metadata.get("prompt", ""),
"negative_prompt": raw_metadata.get("negative_prompt", ""),
"checkpoint": raw_metadata.get("checkpoint", {}),
"steps": raw_metadata.get("steps", ""),
"sampler": raw_metadata.get("sampler", ""),
"cfg_scale": raw_metadata.get("cfg_scale", ""),
"seed": raw_metadata.get("seed", ""),
"size": raw_metadata.get("size", ""),
"clip_skip": raw_metadata.get("clip_skip", ""),
}
fingerprint = calculate_recipe_fingerprint(loras_data)
recipe_data: Dict[str, Any] = {
"id": recipe_id,
"file_path": image_path,
"title": name,
"modified": current_time,
"created_date": current_time,
"base_model": metadata.get("base_model", ""),
"loras": loras_data,
"gen_params": gen_params,
"fingerprint": fingerprint,
}
tags_list = list(tags)
if tags_list:
recipe_data["tags"] = tags_list
if metadata.get("source_path"):
recipe_data["source_path"] = metadata.get("source_path")
json_filename = f"{recipe_id}.recipe.json"
json_path = os.path.join(recipes_dir, json_filename)
with open(json_path, "w", encoding="utf-8") as file_obj:
json.dump(recipe_data, file_obj, indent=4, ensure_ascii=False)
self._exif_utils.append_recipe_metadata(image_path, recipe_data)
matching_recipes = await self._find_matching_recipes(recipe_scanner, fingerprint, exclude_id=recipe_id)
await recipe_scanner.add_recipe(recipe_data)
return PersistenceResult(
{
"success": True,
"recipe_id": recipe_id,
"image_path": image_path,
"json_path": json_path,
"matching_recipes": matching_recipes,
}
)
async def delete_recipe(self, *, recipe_scanner, recipe_id: str) -> PersistenceResult:
"""Delete an existing recipe."""
recipes_dir = recipe_scanner.recipes_dir
if not recipes_dir or not os.path.exists(recipes_dir):
raise RecipeNotFoundError("Recipes directory not found")
recipe_json_path = os.path.join(recipes_dir, f"{recipe_id}.recipe.json")
if not os.path.exists(recipe_json_path):
raise RecipeNotFoundError("Recipe not found")
with open(recipe_json_path, "r", encoding="utf-8") as file_obj:
recipe_data = json.load(file_obj)
image_path = recipe_data.get("file_path")
os.remove(recipe_json_path)
if image_path and os.path.exists(image_path):
os.remove(image_path)
await recipe_scanner.remove_recipe(recipe_id)
return PersistenceResult({"success": True, "message": "Recipe deleted successfully"})
async def update_recipe(self, *, recipe_scanner, recipe_id: str, updates: dict[str, Any]) -> PersistenceResult:
"""Update persisted metadata for a recipe."""
if not any(key in updates for key in ("title", "tags", "source_path", "preview_nsfw_level")):
raise RecipeValidationError(
"At least one field to update must be provided (title or tags or source_path or preview_nsfw_level)"
)
success = await recipe_scanner.update_recipe_metadata(recipe_id, updates)
if not success:
raise RecipeNotFoundError("Recipe not found or update failed")
return PersistenceResult({"success": True, "recipe_id": recipe_id, "updates": updates})
async def reconnect_lora(
self,
*,
recipe_scanner,
recipe_id: str,
lora_index: int,
target_name: str,
) -> PersistenceResult:
"""Reconnect a LoRA entry within an existing recipe."""
recipe_path = os.path.join(recipe_scanner.recipes_dir, f"{recipe_id}.recipe.json")
if not os.path.exists(recipe_path):
raise RecipeNotFoundError("Recipe not found")
target_lora = await recipe_scanner.get_local_lora(target_name)
if not target_lora:
raise RecipeNotFoundError(f"Local LoRA not found with name: {target_name}")
recipe_data, updated_lora = await recipe_scanner.update_lora_entry(
recipe_id,
lora_index,
target_name=target_name,
target_lora=target_lora,
)
image_path = recipe_data.get("file_path")
if image_path and os.path.exists(image_path):
self._exif_utils.append_recipe_metadata(image_path, recipe_data)
matching_recipes = []
if "fingerprint" in recipe_data:
matching_recipes = await recipe_scanner.find_recipes_by_fingerprint(recipe_data["fingerprint"])
if recipe_id in matching_recipes:
matching_recipes.remove(recipe_id)
return PersistenceResult(
{
"success": True,
"recipe_id": recipe_id,
"updated_lora": updated_lora,
"matching_recipes": matching_recipes,
}
)
async def bulk_delete(
self,
*,
recipe_scanner,
recipe_ids: Iterable[str],
) -> PersistenceResult:
"""Delete multiple recipes in a single request."""
recipe_ids = list(recipe_ids)
if not recipe_ids:
raise RecipeValidationError("No recipe IDs provided")
recipes_dir = recipe_scanner.recipes_dir
if not recipes_dir or not os.path.exists(recipes_dir):
raise RecipeNotFoundError("Recipes directory not found")
deleted_recipes: list[str] = []
failed_recipes: list[dict[str, Any]] = []
for recipe_id in recipe_ids:
recipe_json_path = os.path.join(recipes_dir, f"{recipe_id}.recipe.json")
if not os.path.exists(recipe_json_path):
failed_recipes.append({"id": recipe_id, "reason": "Recipe not found"})
continue
try:
with open(recipe_json_path, "r", encoding="utf-8") as file_obj:
recipe_data = json.load(file_obj)
image_path = recipe_data.get("file_path")
os.remove(recipe_json_path)
if image_path and os.path.exists(image_path):
os.remove(image_path)
deleted_recipes.append(recipe_id)
except Exception as exc:
failed_recipes.append({"id": recipe_id, "reason": str(exc)})
if deleted_recipes:
await recipe_scanner.bulk_remove(deleted_recipes)
return PersistenceResult(
{
"success": True,
"deleted": deleted_recipes,
"failed": failed_recipes,
"total_deleted": len(deleted_recipes),
"total_failed": len(failed_recipes),
}
)
async def save_recipe_from_widget(
self,
*,
recipe_scanner,
metadata: dict[str, Any],
image_bytes: bytes,
) -> PersistenceResult:
"""Save a recipe constructed from widget metadata."""
if not metadata:
raise RecipeValidationError("No generation metadata found")
recipes_dir = recipe_scanner.recipes_dir
os.makedirs(recipes_dir, exist_ok=True)
recipe_id = str(uuid.uuid4())
optimized_image, extension = self._exif_utils.optimize_image(
image_data=image_bytes,
target_width=self._card_preview_width,
format="webp",
quality=85,
preserve_metadata=True,
)
image_filename = f"{recipe_id}{extension}"
image_path = os.path.join(recipes_dir, image_filename)
with open(image_path, "wb") as file_obj:
file_obj.write(optimized_image)
lora_stack = metadata.get("loras", "")
lora_matches = re.findall(r"<lora:([^:]+):([^>]+)>", lora_stack)
if not lora_matches:
raise RecipeValidationError("No LoRAs found in the generation metadata")
loras_data = []
base_model_counts: Dict[str, int] = {}
for name, strength in lora_matches:
lora_info = await recipe_scanner.get_local_lora(name)
lora_data = {
"file_name": name,
"strength": float(strength),
"hash": (lora_info.get("sha256") or "").lower() if lora_info else "",
"modelVersionId": (lora_info.get("civitai") or {}).get("id", 0) if lora_info else 0,
"modelName": ((lora_info.get("civitai") or {}).get("model") or {}).get("name", name) if lora_info else "",
"modelVersionName": (lora_info.get("civitai") or {}).get("name", "") if lora_info else "",
"isDeleted": False,
"exclude": False,
}
loras_data.append(lora_data)
if lora_info and "base_model" in lora_info:
base_model = lora_info["base_model"]
base_model_counts[base_model] = base_model_counts.get(base_model, 0) + 1
recipe_name = self._derive_recipe_name(lora_matches)
most_common_base_model = (
max(base_model_counts.items(), key=lambda item: item[1])[0] if base_model_counts else ""
)
recipe_data = {
"id": recipe_id,
"file_path": image_path,
"title": recipe_name,
"modified": time.time(),
"created_date": time.time(),
"base_model": most_common_base_model,
"loras": loras_data,
"checkpoint": metadata.get("checkpoint", ""),
"gen_params": {
key: value
for key, value in metadata.items()
if key not in ["checkpoint", "loras"]
},
"loras_stack": lora_stack,
}
json_filename = f"{recipe_id}.recipe.json"
json_path = os.path.join(recipes_dir, json_filename)
with open(json_path, "w", encoding="utf-8") as file_obj:
json.dump(recipe_data, file_obj, indent=4, ensure_ascii=False)
self._exif_utils.append_recipe_metadata(image_path, recipe_data)
await recipe_scanner.add_recipe(recipe_data)
return PersistenceResult(
{
"success": True,
"recipe_id": recipe_id,
"image_path": image_path,
"json_path": json_path,
"recipe_name": recipe_name,
}
)
# Helper methods ---------------------------------------------------
def _resolve_image_bytes(self, image_bytes: bytes | None, image_base64: str | None) -> bytes:
if image_bytes is not None:
return image_bytes
if image_base64:
try:
payload = image_base64.split(",", 1)[1] if "," in image_base64 else image_base64
return base64.b64decode(payload)
except Exception as exc: # pragma: no cover - validation guard
raise RecipeValidationError(f"Invalid base64 image data: {exc}") from exc
raise RecipeValidationError("No image data provided")
def _normalise_lora_entry(self, lora: dict[str, Any]) -> dict[str, Any]:
return {
"file_name": lora.get("file_name", "")
or (
os.path.splitext(os.path.basename(lora.get("localPath", "")))[0]
if lora.get("localPath")
else ""
),
"hash": (lora.get("hash") or "").lower(),
"strength": float(lora.get("weight", 1.0)),
"modelVersionId": lora.get("id", 0),
"modelName": lora.get("name", ""),
"modelVersionName": lora.get("version", ""),
"isDeleted": lora.get("isDeleted", False),
"exclude": lora.get("exclude", False),
}
async def _find_matching_recipes(
self,
recipe_scanner,
fingerprint: str | None,
*,
exclude_id: Optional[str] = None,
) -> list[str]:
if not fingerprint:
return []
matches = await recipe_scanner.find_recipes_by_fingerprint(fingerprint)
if exclude_id and exclude_id in matches:
matches.remove(exclude_id)
return matches
def _derive_recipe_name(self, lora_matches: list[tuple[str, str]]) -> str:
recipe_name_parts = [f"{name.strip()}-{float(strength):.2f}" for name, strength in lora_matches[:3]]
recipe_name = "_".join(recipe_name_parts)
return recipe_name or "recipe"

View File

@@ -0,0 +1,105 @@
"""Services handling recipe sharing and downloads."""
from __future__ import annotations
import os
import shutil
import tempfile
import time
from dataclasses import dataclass
from typing import Any, Dict
from .errors import RecipeNotFoundError
@dataclass(frozen=True)
class SharingResult:
"""Return payload for share operations."""
payload: dict[str, Any]
status: int = 200
@dataclass(frozen=True)
class DownloadInfo:
"""Information required to stream a shared recipe file."""
file_path: str
download_filename: str
class RecipeSharingService:
"""Prepare temporary recipe downloads with TTL cleanup."""
def __init__(self, *, ttl_seconds: int = 300, logger) -> None:
self._ttl_seconds = ttl_seconds
self._logger = logger
self._shared_recipes: Dict[str, Dict[str, Any]] = {}
async def share_recipe(self, *, recipe_scanner, recipe_id: str) -> SharingResult:
"""Prepare a temporary downloadable copy of a recipe image."""
recipe = await recipe_scanner.get_recipe_by_id(recipe_id)
if not recipe:
raise RecipeNotFoundError("Recipe not found")
image_path = recipe.get("file_path")
if not image_path or not os.path.exists(image_path):
raise RecipeNotFoundError("Recipe image not found")
ext = os.path.splitext(image_path)[1]
with tempfile.NamedTemporaryFile(suffix=ext, delete=False) as temp_file:
temp_path = temp_file.name
shutil.copy2(image_path, temp_path)
timestamp = int(time.time())
self._shared_recipes[recipe_id] = {
"path": temp_path,
"timestamp": timestamp,
"expires": time.time() + self._ttl_seconds,
}
self._cleanup_shared_recipes()
safe_title = recipe.get("title", "").replace(" ", "_").lower()
filename = f"recipe_{safe_title}{ext}" if safe_title else f"recipe_{recipe_id}{ext}"
url_path = f"/api/recipe/{recipe_id}/share/download?t={timestamp}"
return SharingResult({"success": True, "download_url": url_path, "filename": filename})
async def prepare_download(self, *, recipe_scanner, recipe_id: str) -> DownloadInfo:
"""Return file path and filename for a prepared shared recipe."""
shared_info = self._shared_recipes.get(recipe_id)
if not shared_info or time.time() > shared_info.get("expires", 0):
self._cleanup_entry(recipe_id)
raise RecipeNotFoundError("Shared recipe not found or expired")
file_path = shared_info["path"]
if not os.path.exists(file_path):
self._cleanup_entry(recipe_id)
raise RecipeNotFoundError("Shared recipe file not found")
recipe = await recipe_scanner.get_recipe_by_id(recipe_id)
filename_base = (
f"recipe_{recipe.get('title', '').replace(' ', '_').lower()}" if recipe else recipe_id
)
ext = os.path.splitext(file_path)[1]
download_filename = f"{filename_base}{ext}"
return DownloadInfo(file_path=file_path, download_filename=download_filename)
def _cleanup_shared_recipes(self) -> None:
for recipe_id in list(self._shared_recipes.keys()):
shared = self._shared_recipes.get(recipe_id)
if not shared:
continue
if time.time() > shared.get("expires", 0):
self._cleanup_entry(recipe_id)
def _cleanup_entry(self, recipe_id: str) -> None:
shared_info = self._shared_recipes.pop(recipe_id, None)
if not shared_info:
return
file_path = shared_info.get("path")
if file_path and os.path.exists(file_path):
try:
os.unlink(file_path)
except Exception as exc: # pragma: no cover - defensive logging
self._logger.error("Error cleaning up shared recipe %s: %s", recipe_id, exc)

View File

@@ -1,14 +1,50 @@
import copy
import json
import os
import logging
from datetime import datetime, timezone
from typing import Any, Dict, Iterable, List, Mapping, Optional
from ..utils.settings_paths import ensure_settings_file
logger = logging.getLogger(__name__)
DEFAULT_SETTINGS: Dict[str, Any] = {
"civitai_api_key": "",
"language": "en",
"show_only_sfw": False,
"enable_metadata_archive_db": False,
"proxy_enabled": False,
"proxy_host": "",
"proxy_port": "",
"proxy_username": "",
"proxy_password": "",
"proxy_type": "http",
"default_lora_root": "",
"default_checkpoint_root": "",
"default_embedding_root": "",
"base_model_path_mappings": {},
"download_path_templates": {},
"example_images_path": "",
"optimize_example_images": True,
"auto_download_example_images": False,
"blur_mature_content": True,
"autoplay_on_hover": False,
"display_density": "default",
"card_info_display": "always",
"include_trigger_words": False,
"compact_mode": False,
}
class SettingsManager:
def __init__(self):
self.settings_file = ensure_settings_file(logger)
self.settings = self._load_settings()
self._migrate_setting_keys()
self._ensure_default_settings()
self._migrate_to_library_registry()
self._migrate_download_path_template()
self._auto_set_default_roots()
self._check_environment_variables()
@@ -23,11 +59,266 @@ class SettingsManager:
logger.error(f"Error loading settings: {e}")
return self._get_default_settings()
def _ensure_default_settings(self) -> None:
"""Ensure all default settings keys exist"""
updated = False
for key, value in self._get_default_settings().items():
if key not in self.settings:
if isinstance(value, dict):
self.settings[key] = value.copy()
else:
self.settings[key] = value
updated = True
if updated:
self._save_settings()
def _migrate_to_library_registry(self) -> None:
"""Ensure settings include the multi-library registry structure."""
libraries = self.settings.get("libraries")
active_name = self.settings.get("active_library")
if not isinstance(libraries, dict) or not libraries:
library_name = active_name or "default"
library_payload = self._build_library_payload(
folder_paths=self.settings.get("folder_paths", {}),
default_lora_root=self.settings.get("default_lora_root", ""),
default_checkpoint_root=self.settings.get("default_checkpoint_root", ""),
default_embedding_root=self.settings.get("default_embedding_root", ""),
)
libraries = {library_name: library_payload}
self.settings["libraries"] = libraries
self.settings["active_library"] = library_name
self._sync_active_library_to_root(save=False)
self._save_settings()
return
sanitized_libraries: Dict[str, Dict[str, Any]] = {}
changed = False
for name, data in libraries.items():
if not isinstance(data, dict):
data = {}
changed = True
payload = self._build_library_payload(
folder_paths=data.get("folder_paths"),
default_lora_root=data.get("default_lora_root"),
default_checkpoint_root=data.get("default_checkpoint_root"),
default_embedding_root=data.get("default_embedding_root"),
metadata=data.get("metadata"),
base=data,
)
sanitized_libraries[name] = payload
if payload is not data:
changed = True
if changed:
self.settings["libraries"] = sanitized_libraries
if not active_name or active_name not in sanitized_libraries:
if sanitized_libraries:
self.settings["active_library"] = next(iter(sanitized_libraries.keys()))
else:
self.settings["active_library"] = "default"
self._sync_active_library_to_root(save=changed)
def _sync_active_library_to_root(self, *, save: bool = False) -> None:
"""Update top-level folder path settings to mirror the active library."""
libraries = self.settings.get("libraries", {})
active_name = self.settings.get("active_library")
if not libraries:
return
if active_name not in libraries:
active_name = next(iter(libraries.keys()))
self.settings["active_library"] = active_name
active_library = libraries.get(active_name, {})
folder_paths = copy.deepcopy(active_library.get("folder_paths", {}))
self.settings["folder_paths"] = folder_paths
self.settings["default_lora_root"] = active_library.get("default_lora_root", "")
self.settings["default_checkpoint_root"] = active_library.get("default_checkpoint_root", "")
self.settings["default_embedding_root"] = active_library.get("default_embedding_root", "")
if save:
self._save_settings()
def _current_timestamp(self) -> str:
return datetime.now(timezone.utc).replace(microsecond=0).isoformat()
def _build_library_payload(
self,
*,
folder_paths: Optional[Mapping[str, Iterable[str]]] = None,
default_lora_root: Optional[str] = None,
default_checkpoint_root: Optional[str] = None,
default_embedding_root: Optional[str] = None,
metadata: Optional[Mapping[str, Any]] = None,
base: Optional[Mapping[str, Any]] = None,
) -> Dict[str, Any]:
payload: Dict[str, Any] = dict(base or {})
timestamp = self._current_timestamp()
if folder_paths is not None:
payload["folder_paths"] = self._normalize_folder_paths(folder_paths)
else:
payload.setdefault("folder_paths", {})
if default_lora_root is not None:
payload["default_lora_root"] = default_lora_root
else:
payload.setdefault("default_lora_root", "")
if default_checkpoint_root is not None:
payload["default_checkpoint_root"] = default_checkpoint_root
else:
payload.setdefault("default_checkpoint_root", "")
if default_embedding_root is not None:
payload["default_embedding_root"] = default_embedding_root
else:
payload.setdefault("default_embedding_root", "")
if metadata:
merged_meta = dict(payload.get("metadata", {}))
merged_meta.update(metadata)
payload["metadata"] = merged_meta
payload.setdefault("created_at", timestamp)
payload["updated_at"] = timestamp
return payload
def _normalize_folder_paths(
self, folder_paths: Mapping[str, Iterable[str]]
) -> Dict[str, List[str]]:
normalized: Dict[str, List[str]] = {}
for key, values in folder_paths.items():
if not isinstance(values, Iterable):
continue
cleaned: List[str] = []
seen = set()
for value in values:
if not isinstance(value, str):
continue
stripped = value.strip()
if not stripped:
continue
if stripped not in seen:
cleaned.append(stripped)
seen.add(stripped)
normalized[key] = cleaned
return normalized
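For reference, an input/output sketch of the normalization above (values invented for the example):
# {"loras": [" /a ", "/a", "", 42, "/b"]}  ->  {"loras": ["/a", "/b"]}
# Non-string and blank entries are dropped; duplicates are removed while
# keeping first-seen order, and keys whose values are not iterable are skipped.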
def _validate_folder_paths(
self,
library_name: str,
folder_paths: Mapping[str, Iterable[str]],
) -> None:
"""Ensure folder paths do not overlap with other libraries."""
libraries = self.settings.get("libraries", {})
normalized_new: Dict[str, Dict[str, str]] = {}
for key, values in folder_paths.items():
path_map: Dict[str, str] = {}
for value in values:
if not isinstance(value, str):
continue
stripped = value.strip()
if not stripped:
continue
normalized_value = os.path.normcase(os.path.normpath(stripped))
path_map[normalized_value] = stripped
if path_map:
normalized_new[key] = path_map
if not normalized_new:
return
for other_name, other in libraries.items():
if other_name == library_name:
continue
other_paths = other.get("folder_paths", {})
for key, new_paths in normalized_new.items():
existing = {
os.path.normcase(os.path.normpath(path))
for path in other_paths.get(key, [])
if isinstance(path, str) and path
}
overlap = existing.intersection(new_paths.keys())
if overlap:
collisions = ", ".join(sorted(new_paths[value] for value in overlap))
raise ValueError(
f"Folder path(s) {collisions} already assigned to library '{other_name}'"
)
def _update_active_library_entry(
self,
*,
folder_paths: Optional[Mapping[str, Iterable[str]]] = None,
default_lora_root: Optional[str] = None,
default_checkpoint_root: Optional[str] = None,
default_embedding_root: Optional[str] = None,
) -> bool:
libraries = self.settings.get("libraries", {})
active_name = self.settings.get("active_library")
if not active_name or active_name not in libraries:
return False
library = libraries[active_name]
changed = False
if folder_paths is not None:
normalized_paths = self._normalize_folder_paths(folder_paths)
if library.get("folder_paths") != normalized_paths:
library["folder_paths"] = normalized_paths
changed = True
if default_lora_root is not None and library.get("default_lora_root") != default_lora_root:
library["default_lora_root"] = default_lora_root
changed = True
if default_checkpoint_root is not None and library.get("default_checkpoint_root") != default_checkpoint_root:
library["default_checkpoint_root"] = default_checkpoint_root
changed = True
if default_embedding_root is not None and library.get("default_embedding_root") != default_embedding_root:
library["default_embedding_root"] = default_embedding_root
changed = True
if changed:
library.setdefault("created_at", self._current_timestamp())
library["updated_at"] = self._current_timestamp()
return changed
def _migrate_setting_keys(self) -> None:
"""Migrate legacy camelCase setting keys to snake_case"""
key_migrations = {
'optimizeExampleImages': 'optimize_example_images',
'autoDownloadExampleImages': 'auto_download_example_images',
'blurMatureContent': 'blur_mature_content',
'autoplayOnHover': 'autoplay_on_hover',
'displayDensity': 'display_density',
'cardInfoDisplay': 'card_info_display',
'includeTriggerWords': 'include_trigger_words',
'compactMode': 'compact_mode',
}
updated = False
for old_key, new_key in key_migrations.items():
if old_key in self.settings:
if new_key not in self.settings:
self.settings[new_key] = self.settings[old_key]
del self.settings[old_key]
updated = True
if updated:
logger.info("Migrated legacy setting keys to snake_case")
self._save_settings()
def _migrate_download_path_template(self):
"""Migrate old download_path_template to new download_path_templates"""
old_template = self.settings.get('download_path_template')
templates = self.settings.get('download_path_templates')
# If old template exists and new templates don't exist, migrate
if old_template is not None and not templates:
logger.info("Migrating download_path_template to download_path_templates")
@@ -42,25 +333,36 @@ class SettingsManager:
logger.info("Migration completed")
def _auto_set_default_roots(self):
"""Auto set default root paths if only one folder is present and default is empty."""
"""Auto set default root paths when only one folder is present and the current default is unset or not among the options."""
folder_paths = self.settings.get('folder_paths', {})
updated = False
# loras
loras = folder_paths.get('loras', [])
if isinstance(loras, list) and len(loras) == 1 and not self.settings.get('default_lora_root'):
self.settings['default_lora_root'] = loras[0]
updated = True
if isinstance(loras, list) and len(loras) == 1:
current_lora_root = self.settings.get('default_lora_root')
if current_lora_root not in loras:
self.settings['default_lora_root'] = loras[0]
updated = True
# checkpoints
checkpoints = folder_paths.get('checkpoints', [])
if isinstance(checkpoints, list) and len(checkpoints) == 1 and not self.settings.get('default_checkpoint_root'):
self.settings['default_checkpoint_root'] = checkpoints[0]
updated = True
if isinstance(checkpoints, list) and len(checkpoints) == 1:
current_checkpoint_root = self.settings.get('default_checkpoint_root')
if current_checkpoint_root not in checkpoints:
self.settings['default_checkpoint_root'] = checkpoints[0]
updated = True
# embeddings
embeddings = folder_paths.get('embeddings', [])
if isinstance(embeddings, list) and len(embeddings) == 1 and not self.settings.get('default_embedding_root'):
self.settings['default_embedding_root'] = embeddings[0]
updated = True
if isinstance(embeddings, list) and len(embeddings) == 1:
current_embedding_root = self.settings.get('default_embedding_root')
if current_embedding_root not in embeddings:
self.settings['default_embedding_root'] = embeddings[0]
updated = True
if updated:
self._update_active_library_entry(
default_lora_root=self.settings.get('default_lora_root'),
default_checkpoint_root=self.settings.get('default_checkpoint_root'),
default_embedding_root=self.settings.get('default_embedding_root'),
)
self._save_settings()
def _check_environment_variables(self) -> None:
@@ -78,18 +380,11 @@ class SettingsManager:
def _get_default_settings(self) -> Dict[str, Any]:
"""Return default settings"""
return {
"civitai_api_key": "",
"language": "en",
"show_only_sfw": False, # Show only SFW content
"enable_metadata_archive_db": False, # Enable metadata archive database
"proxy_enabled": False, # Enable app-level proxy
"proxy_host": "", # Proxy host
"proxy_port": "", # Proxy port
"proxy_username": "", # Proxy username (optional)
"proxy_password": "", # Proxy password (optional)
"proxy_type": "http" # Proxy type: http, https, socks4, socks5
}
defaults = DEFAULT_SETTINGS.copy()
# Ensure nested dicts are independent copies
defaults['base_model_path_mappings'] = {}
defaults['download_path_templates'] = {}
return defaults
def get(self, key: str, default: Any = None) -> Any:
"""Get setting value"""
@@ -98,6 +393,14 @@ class SettingsManager:
def set(self, key: str, value: Any) -> None:
"""Set setting value and save"""
self.settings[key] = value
if key == 'folder_paths' and isinstance(value, Mapping):
self._update_active_library_entry(folder_paths=value) # type: ignore[arg-type]
elif key == 'default_lora_root':
self._update_active_library_entry(default_lora_root=str(value))
elif key == 'default_checkpoint_root':
self._update_active_library_entry(default_checkpoint_root=str(value))
elif key == 'default_embedding_root':
self._update_active_library_entry(default_embedding_root=str(value))
self._save_settings()
def delete(self, key: str) -> None:
@@ -115,6 +418,227 @@ class SettingsManager:
except Exception as e:
logger.error(f"Error saving settings: {e}")
def get_libraries(self) -> Dict[str, Dict[str, Any]]:
"""Return a copy of the registered libraries."""
libraries = self.settings.get("libraries", {})
return copy.deepcopy(libraries)
def get_active_library_name(self) -> str:
"""Return the currently active library name."""
libraries = self.settings.get("libraries", {})
active_name = self.settings.get("active_library")
if active_name and active_name in libraries:
return active_name
if libraries:
return next(iter(libraries.keys()))
return "default"
def get_active_library(self) -> Dict[str, Any]:
"""Return a copy of the active library configuration."""
libraries = self.settings.get("libraries", {})
active_name = self.get_active_library_name()
return copy.deepcopy(libraries.get(active_name, {}))
def activate_library(self, library_name: str) -> None:
"""Activate a library by name and refresh dependent services."""
libraries = self.settings.get("libraries", {})
if library_name not in libraries:
raise KeyError(f"Library '{library_name}' does not exist")
current_active = self.get_active_library_name()
if current_active == library_name:
# Ensure root settings stay in sync even if already active
self._sync_active_library_to_root(save=False)
self._save_settings()
self._notify_library_change(library_name)
return
self.settings["active_library"] = library_name
self._sync_active_library_to_root(save=False)
self._save_settings()
self._notify_library_change(library_name)
def upsert_library(
self,
library_name: str,
*,
folder_paths: Optional[Mapping[str, Iterable[str]]] = None,
default_lora_root: Optional[str] = None,
default_checkpoint_root: Optional[str] = None,
default_embedding_root: Optional[str] = None,
metadata: Optional[Mapping[str, Any]] = None,
activate: bool = False,
) -> Dict[str, Any]:
"""Create or update a library definition."""
name = library_name.strip()
if not name:
raise ValueError("Library name cannot be empty")
if folder_paths is not None:
self._validate_folder_paths(name, folder_paths)
libraries = self.settings.setdefault("libraries", {})
existing = libraries.get(name, {})
payload = self._build_library_payload(
folder_paths=folder_paths if folder_paths is not None else existing.get("folder_paths"),
default_lora_root=default_lora_root if default_lora_root is not None else existing.get("default_lora_root"),
default_checkpoint_root=(
default_checkpoint_root
if default_checkpoint_root is not None
else existing.get("default_checkpoint_root")
),
default_embedding_root=(
default_embedding_root
if default_embedding_root is not None
else existing.get("default_embedding_root")
),
metadata=metadata if metadata is not None else existing.get("metadata"),
base=existing,
)
libraries[name] = payload
if activate or not self.settings.get("active_library"):
self.settings["active_library"] = name
self._sync_active_library_to_root(save=False)
self._save_settings()
if self.settings.get("active_library") == name:
self._notify_library_change(name)
return payload
def create_library(
self,
library_name: str,
*,
folder_paths: Mapping[str, Iterable[str]],
default_lora_root: str = "",
default_checkpoint_root: str = "",
default_embedding_root: str = "",
metadata: Optional[Mapping[str, Any]] = None,
activate: bool = False,
) -> Dict[str, Any]:
"""Create a new library entry."""
libraries = self.settings.get("libraries", {})
if library_name in libraries:
raise ValueError(f"Library '{library_name}' already exists")
return self.upsert_library(
library_name,
folder_paths=folder_paths,
default_lora_root=default_lora_root,
default_checkpoint_root=default_checkpoint_root,
default_embedding_root=default_embedding_root,
metadata=metadata,
activate=activate,
)
def rename_library(self, old_name: str, new_name: str) -> None:
"""Rename an existing library."""
libraries = self.settings.get("libraries", {})
if old_name not in libraries:
raise KeyError(f"Library '{old_name}' does not exist")
new_name_stripped = new_name.strip()
if not new_name_stripped:
raise ValueError("New library name cannot be empty")
if new_name_stripped in libraries:
raise ValueError(f"Library '{new_name_stripped}' already exists")
libraries[new_name_stripped] = libraries.pop(old_name)
if self.settings.get("active_library") == old_name:
self.settings["active_library"] = new_name_stripped
active_name = new_name_stripped
else:
active_name = self.settings.get("active_library")
self._sync_active_library_to_root(save=False)
self._save_settings()
if active_name == new_name_stripped:
self._notify_library_change(new_name_stripped)
def delete_library(self, library_name: str) -> None:
"""Remove a library definition."""
libraries = self.settings.get("libraries", {})
if library_name not in libraries:
raise KeyError(f"Library '{library_name}' does not exist")
if len(libraries) == 1:
raise ValueError("At least one library must remain")
was_active = self.settings.get("active_library") == library_name
libraries.pop(library_name)
if was_active:
new_active = next(iter(libraries.keys()))
self.settings["active_library"] = new_active
self._sync_active_library_to_root(save=False)
self._save_settings()
if was_active:
self._notify_library_change(self.settings["active_library"])
def update_active_library_paths(
self,
folder_paths: Mapping[str, Iterable[str]],
*,
default_lora_root: Optional[str] = None,
default_checkpoint_root: Optional[str] = None,
default_embedding_root: Optional[str] = None,
) -> None:
"""Update folder paths for the active library."""
active_name = self.get_active_library_name()
self.upsert_library(
active_name,
folder_paths=folder_paths,
default_lora_root=default_lora_root,
default_checkpoint_root=default_checkpoint_root,
default_embedding_root=default_embedding_root,
activate=True,
)
def _notify_library_change(self, library_name: str) -> None:
"""Notify dependent services that the active library changed."""
libraries = self.settings.get("libraries", {})
library_config = libraries.get(library_name, {})
library_snapshot = copy.deepcopy(library_config)
try:
from ..config import config # Local import to avoid circular dependency
config.apply_library_settings(library_snapshot)
except Exception as exc: # pragma: no cover - defensive logging
logger.debug("Failed to apply library settings to config: %s", exc)
try:
from .service_registry import ServiceRegistry # type: ignore
for service_name in (
"lora_scanner",
"checkpoint_scanner",
"embedding_scanner",
"recipe_scanner",
):
service = ServiceRegistry.get_service_sync(service_name)
if service and hasattr(service, "on_library_changed"):
try:
service.on_library_changed()
except Exception as service_exc: # pragma: no cover - defensive logging
logger.debug(
"Service %s failed to handle library change: %s",
service_name,
service_exc,
)
except Exception as exc: # pragma: no cover - defensive logging
logger.debug("Failed to notify services about library change: %s", exc)
def get_download_path_template(self, model_type: str) -> str:
"""Get download path template for specific model type


@@ -0,0 +1,47 @@
"""Service for updating tag collections on metadata records."""
from __future__ import annotations
import os
from typing import Awaitable, Callable, Dict, List, Sequence
class TagUpdateService:
"""Encapsulate tag manipulation for models."""
def __init__(self, *, metadata_manager) -> None:
self._metadata_manager = metadata_manager
async def add_tags(
self,
*,
file_path: str,
new_tags: Sequence[str],
metadata_loader: Callable[[str], Awaitable[Dict[str, object]]],
update_cache: Callable[[str, str, Dict[str, object]], Awaitable[bool]],
) -> List[str]:
"""Add tags to a metadata entry while keeping case-insensitive uniqueness."""
base, _ = os.path.splitext(file_path)
metadata_path = f"{base}.metadata.json"
metadata = await metadata_loader(metadata_path)
existing_tags = list(metadata.get("tags", []))
existing_lower = [tag.lower() for tag in existing_tags]
tags_added: List[str] = []
for tag in new_tags:
if isinstance(tag, str) and tag.strip():
normalized = tag.strip()
if normalized.lower() not in existing_lower:
existing_tags.append(normalized)
existing_lower.append(normalized.lower())
tags_added.append(normalized)
metadata["tags"] = existing_tags
await self._metadata_manager.save_metadata(file_path, metadata)
await update_cache(file_path, file_path, metadata)
return existing_tags
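A sketch of how a caller might drive add_tags; the metadata loader shown here is a hypothetical helper, while update_single_model_cache matches the scanner API used elsewhere in this diff:
async def add_model_tags(metadata_manager, scanner, file_path: str):
    service = TagUpdateService(metadata_manager=metadata_manager)
    return await service.add_tags(
        file_path=file_path,
        new_tags=["storybook", "Storybook", "  "],  # deduped case-insensitively, blanks skipped
        metadata_loader=scanner.load_metadata_json,      # hypothetical loader helper
        update_cache=scanner.update_single_model_cache,
    )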


@@ -0,0 +1,37 @@
"""Application-level orchestration services for model routes."""
from .auto_organize_use_case import (
AutoOrganizeInProgressError,
AutoOrganizeUseCase,
)
from .bulk_metadata_refresh_use_case import (
BulkMetadataRefreshUseCase,
MetadataRefreshProgressReporter,
)
from .download_model_use_case import (
DownloadModelEarlyAccessError,
DownloadModelUseCase,
DownloadModelValidationError,
)
from .example_images import (
DownloadExampleImagesConfigurationError,
DownloadExampleImagesInProgressError,
DownloadExampleImagesUseCase,
ImportExampleImagesUseCase,
ImportExampleImagesValidationError,
)
__all__ = [
"AutoOrganizeInProgressError",
"AutoOrganizeUseCase",
"BulkMetadataRefreshUseCase",
"MetadataRefreshProgressReporter",
"DownloadModelEarlyAccessError",
"DownloadModelUseCase",
"DownloadModelValidationError",
"DownloadExampleImagesConfigurationError",
"DownloadExampleImagesInProgressError",
"DownloadExampleImagesUseCase",
"ImportExampleImagesUseCase",
"ImportExampleImagesValidationError",
]


@@ -0,0 +1,56 @@
"""Auto-organize use case orchestrating concurrency and progress handling."""
from __future__ import annotations
import asyncio
from typing import Optional, Protocol, Sequence
from ..model_file_service import AutoOrganizeResult, ModelFileService, ProgressCallback
class AutoOrganizeLockProvider(Protocol):
"""Minimal protocol for objects exposing auto-organize locking primitives."""
def is_auto_organize_running(self) -> bool:
"""Return ``True`` when an auto-organize operation is in-flight."""
async def get_auto_organize_lock(self) -> asyncio.Lock:
"""Return the asyncio lock guarding auto-organize operations."""
class AutoOrganizeInProgressError(RuntimeError):
"""Raised when an auto-organize run is already active."""
class AutoOrganizeUseCase:
"""Coordinate auto-organize execution behind a shared lock."""
def __init__(
self,
*,
file_service: ModelFileService,
lock_provider: AutoOrganizeLockProvider,
) -> None:
self._file_service = file_service
self._lock_provider = lock_provider
async def execute(
self,
*,
file_paths: Optional[Sequence[str]] = None,
progress_callback: Optional[ProgressCallback] = None,
) -> AutoOrganizeResult:
"""Run the auto-organize routine guarded by a shared lock."""
if self._lock_provider.is_auto_organize_running():
raise AutoOrganizeInProgressError("Auto-organize is already running")
lock = await self._lock_provider.get_auto_organize_lock()
if lock.locked():
raise AutoOrganizeInProgressError("Auto-organize is already running")
async with lock:
return await self._file_service.auto_organize_models(
file_paths=list(file_paths) if file_paths is not None else None,
progress_callback=progress_callback,
)
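Any object exposing the two primitives satisfies AutoOrganizeLockProvider; a minimal in-memory provider might look like this (illustrative, not part of the diff):
import asyncio

class InMemoryLockProvider:
    """Illustrative AutoOrganizeLockProvider backed by a single asyncio.Lock."""

    def __init__(self) -> None:
        self._lock = asyncio.Lock()

    def is_auto_organize_running(self) -> bool:
        return self._lock.locked()

    async def get_auto_organize_lock(self) -> asyncio.Lock:
        return self._lock

# use_case = AutoOrganizeUseCase(file_service=service, lock_provider=InMemoryLockProvider())
# result = await use_case.execute()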


@@ -0,0 +1,122 @@
"""Use case encapsulating the bulk metadata refresh orchestration."""
from __future__ import annotations
import logging
from typing import Any, Dict, Optional, Protocol, Sequence
from ..metadata_sync_service import MetadataSyncService
class MetadataRefreshProgressReporter(Protocol):
"""Protocol for progress reporters used during metadata refresh."""
async def on_progress(self, payload: Dict[str, Any]) -> None:
"""Handle a metadata refresh progress update."""
class BulkMetadataRefreshUseCase:
"""Coordinate bulk metadata refreshes with progress emission."""
def __init__(
self,
*,
service,
metadata_sync: MetadataSyncService,
settings_service,
logger: Optional[logging.Logger] = None,
) -> None:
self._service = service
self._metadata_sync = metadata_sync
self._settings = settings_service
self._logger = logger or logging.getLogger(__name__)
async def execute(
self,
*,
progress_callback: Optional[MetadataRefreshProgressReporter] = None,
) -> Dict[str, Any]:
"""Refresh metadata for all qualifying models."""
cache = await self._service.scanner.get_cached_data()
total_models = len(cache.raw_data)
enable_metadata_archive_db = self._settings.get("enable_metadata_archive_db", False)
to_process: Sequence[Dict[str, Any]] = [
model
for model in cache.raw_data
if model.get("sha256")
and (not model.get("civitai") or not model["civitai"].get("id"))
and (
(enable_metadata_archive_db and not model.get("db_checked", False))
or (not enable_metadata_archive_db and model.get("from_civitai") is True)
)
]
total_to_process = len(to_process)
processed = 0
success = 0
needs_resort = False
async def emit(status: str, **extra: Any) -> None:
if progress_callback is None:
return
payload = {"status": status, "total": total_to_process, "processed": processed, "success": success}
payload.update(extra)
await progress_callback.on_progress(payload)
await emit("started")
for model in to_process:
try:
original_name = model.get("model_name")
result, _ = await self._metadata_sync.fetch_and_update_model(
sha256=model["sha256"],
file_path=model["file_path"],
model_data=model,
update_cache_func=self._service.scanner.update_single_model_cache,
)
if result:
success += 1
if original_name != model.get("model_name"):
needs_resort = True
processed += 1
await emit(
"processing",
processed=processed,
success=success,
current_name=model.get("model_name", "Unknown"),
)
except Exception as exc: # pragma: no cover - logging path
processed += 1
self._logger.error(
"Error fetching CivitAI data for %s: %s",
model.get("file_path"),
exc,
)
if needs_resort:
await cache.resort()
await emit("completed", processed=processed, success=success)
message = (
"Successfully updated "
f"{success} of {processed} processed {self._service.model_type}s (total: {total_models})"
)
return {"success": True, "message": message, "processed": processed, "updated": success, "total": total_models}
async def execute_with_error_handling(
self,
*,
progress_callback: Optional[MetadataRefreshProgressReporter] = None,
) -> Dict[str, Any]:
"""Wrapper providing progress notification on unexpected failures."""
try:
return await self.execute(progress_callback=progress_callback)
except Exception as exc:
if progress_callback is not None:
await progress_callback.on_progress({"status": "error", "error": str(exc)})
raise
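Any object with an async on_progress method satisfies the reporter protocol; for headless runs a logging reporter is enough (sketch, not part of the diff):
import logging

class LoggingRefreshReporter:
    """Illustrative MetadataRefreshProgressReporter that logs each payload."""

    async def on_progress(self, payload: dict) -> None:
        logging.getLogger(__name__).info("metadata refresh: %s", payload)

# result = await use_case.execute_with_error_handling(
#     progress_callback=LoggingRefreshReporter())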


@@ -0,0 +1,37 @@
"""Use case for scheduling model downloads with consistent error handling."""
from __future__ import annotations
from typing import Any, Dict
from ..download_coordinator import DownloadCoordinator
class DownloadModelValidationError(ValueError):
"""Raised when incoming payload validation fails."""
class DownloadModelEarlyAccessError(RuntimeError):
"""Raised when the download is gated behind Civitai early access."""
class DownloadModelUseCase:
"""Coordinate download scheduling through the coordinator service."""
def __init__(self, *, download_coordinator: DownloadCoordinator) -> None:
self._download_coordinator = download_coordinator
async def execute(self, payload: Dict[str, Any]) -> Dict[str, Any]:
"""Schedule a download and normalize error conditions."""
try:
return await self._download_coordinator.schedule_download(payload)
except ValueError as exc:
raise DownloadModelValidationError(str(exc)) from exc
except Exception as exc: # pragma: no cover - defensive logging path
message = str(exc)
if "401" in message:
raise DownloadModelEarlyAccessError(
"Early Access Restriction: This model requires purchase. Please buy early access on Civitai.com."
) from exc
raise
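The two error types map naturally onto HTTP statuses at the route layer; a plausible translation (the actual route handler is not part of this diff, and passing the use case as a parameter is a simplification):
from aiohttp import web

async def download_model_handler(request: web.Request, use_case: DownloadModelUseCase) -> web.Response:
    payload = await request.json()
    try:
        return web.json_response(await use_case.execute(payload))
    except DownloadModelValidationError as exc:
        return web.json_response({"success": False, "error": str(exc)}, status=400)
    except DownloadModelEarlyAccessError as exc:
        return web.json_response({"success": False, "error": str(exc)}, status=401)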


@@ -0,0 +1,19 @@
"""Example image specific use case exports."""
from .download_example_images_use_case import (
DownloadExampleImagesUseCase,
DownloadExampleImagesInProgressError,
DownloadExampleImagesConfigurationError,
)
from .import_example_images_use_case import (
ImportExampleImagesUseCase,
ImportExampleImagesValidationError,
)
__all__ = [
"DownloadExampleImagesUseCase",
"DownloadExampleImagesInProgressError",
"DownloadExampleImagesConfigurationError",
"ImportExampleImagesUseCase",
"ImportExampleImagesValidationError",
]


@@ -0,0 +1,42 @@
"""Use case coordinating example image downloads."""
from __future__ import annotations
from typing import Any, Dict
from ....utils.example_images_download_manager import (
DownloadConfigurationError,
DownloadInProgressError,
ExampleImagesDownloadError,
)
class DownloadExampleImagesInProgressError(RuntimeError):
"""Raised when a download is already running."""
def __init__(self, progress: Dict[str, Any]) -> None:
super().__init__("Download already in progress")
self.progress = progress
class DownloadExampleImagesConfigurationError(ValueError):
"""Raised when settings prevent downloads from starting."""
class DownloadExampleImagesUseCase:
"""Validate payloads and trigger the download manager."""
def __init__(self, *, download_manager) -> None:
self._download_manager = download_manager
async def execute(self, payload: Dict[str, Any]) -> Dict[str, Any]:
"""Start a download and translate manager errors."""
try:
return await self._download_manager.start_download(payload)
except DownloadInProgressError as exc:
raise DownloadExampleImagesInProgressError(exc.progress_snapshot) from exc
except DownloadConfigurationError as exc:
raise DownloadExampleImagesConfigurationError(str(exc)) from exc
except ExampleImagesDownloadError:
raise


@@ -0,0 +1,86 @@
"""Use case for importing example images."""
from __future__ import annotations
import os
import tempfile
from contextlib import suppress
from typing import Any, Dict, List
from aiohttp import web
from ....utils.example_images_processor import (
ExampleImagesImportError,
ExampleImagesProcessor,
ExampleImagesValidationError,
)
class ImportExampleImagesValidationError(ValueError):
"""Raised when request validation fails."""
class ImportExampleImagesUseCase:
"""Parse upload payloads and delegate to the processor service."""
def __init__(self, *, processor: ExampleImagesProcessor) -> None:
self._processor = processor
async def execute(self, request: web.Request) -> Dict[str, Any]:
model_hash: str | None = None
files_to_import: List[str] = []
temp_files: List[str] = []
try:
if request.content_type and "multipart/form-data" in request.content_type:
reader = await request.multipart()
first_field = await reader.next()
if first_field and first_field.name == "model_hash":
model_hash = await first_field.text()
else:
# Support clients that send files first and hash later
if first_field is not None:
await self._collect_upload_file(first_field, files_to_import, temp_files)
async for field in reader:
if field.name == "model_hash" and not model_hash:
model_hash = await field.text()
elif field.name == "files":
await self._collect_upload_file(field, files_to_import, temp_files)
else:
data = await request.json()
model_hash = data.get("model_hash")
files_to_import = list(data.get("file_paths", []))
result = await self._processor.import_images(model_hash, files_to_import)
return result
except ExampleImagesValidationError as exc:
raise ImportExampleImagesValidationError(str(exc)) from exc
except ExampleImagesImportError:
raise
finally:
for path in temp_files:
with suppress(Exception):
os.remove(path)
async def _collect_upload_file(
self,
field: Any,
files_to_import: List[str],
temp_files: List[str],
) -> None:
"""Persist an uploaded file to disk and add it to the import list."""
filename = field.filename or "upload"
file_ext = os.path.splitext(filename)[1].lower()
with tempfile.NamedTemporaryFile(suffix=file_ext, delete=False) as tmp_file:
temp_files.append(tmp_file.name)
while True:
chunk = await field.read_chunk()
if not chunk:
break
tmp_file.write(chunk)
files_to_import.append(tmp_file.name)
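For the JSON branch a client only needs model_hash plus local file_paths; a hypothetical call (the endpoint URL is assumed, not taken from this diff):
import aiohttp

async def import_examples(model_hash: str, paths: list[str]) -> dict:
    async with aiohttp.ClientSession() as session:
        async with session.post(
            "http://127.0.0.1:8188/api/example-images/import",  # assumed route
            json={"model_hash": model_hash, "file_paths": paths},
        ) as resp:
            return await resp.json()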


@@ -16,6 +16,8 @@ class WebSocketManager:
self._download_websockets: Dict[str, web.WebSocketResponse] = {} # New dict for download-specific clients
# Add progress tracking dictionary
self._download_progress: Dict[str, Dict] = {}
# Cache last initialization progress payloads
self._last_init_progress: Dict[str, Dict] = {}
# Add auto-organize progress tracking
self._auto_organize_progress: Optional[Dict] = None
self._auto_organize_lock = asyncio.Lock()
@@ -39,8 +41,10 @@ class WebSocketManager:
ws = web.WebSocketResponse()
await ws.prepare(request)
self._init_websockets.add(ws)
try:
await self._send_cached_init_progress(ws)
async for msg in ws:
if msg.type == web.WSMsgType.ERROR:
logger.error(f'Init WebSocket error: {ws.exception()}')
@@ -102,23 +106,53 @@ class WebSocketManager:
async def broadcast_init_progress(self, data: Dict):
"""Broadcast initialization progress to connected clients"""
payload = dict(data) if data else {}
if 'stage' not in payload:
payload['stage'] = 'processing'
if 'progress' not in payload:
payload['progress'] = 0
if 'details' not in payload:
payload['details'] = 'Processing...'
key = self._get_init_progress_key(payload)
self._last_init_progress[key] = dict(payload)
if not self._init_websockets:
return
# Ensure data has all required fields
if 'stage' not in data:
data['stage'] = 'processing'
if 'progress' not in data:
data['progress'] = 0
if 'details' not in data:
data['details'] = 'Processing...'
for ws in self._init_websockets:
stale_clients = []
for ws in list(self._init_websockets):
try:
await ws.send_json(data)
await ws.send_json(payload)
except Exception as e:
logger.error(f"Error sending initialization progress: {e}")
stale_clients.append(ws)
for ws in stale_clients:
self._init_websockets.discard(ws)
async def _send_cached_init_progress(self, ws: web.WebSocketResponse) -> None:
"""Send cached initialization progress payloads to a new client"""
if not self._last_init_progress:
return
for payload in list(self._last_init_progress.values()):
try:
await ws.send_json(payload)
except Exception as e:
logger.debug(f'Error sending cached initialization progress: {e}')
def _get_init_progress_key(self, data: Dict) -> str:
"""Return a stable key for caching initialization progress payloads"""
page_type = data.get('pageType')
if page_type:
return f'page:{page_type}'
scanner_type = data.get('scanner_type')
if scanner_type:
return f'scanner:{scanner_type}'
return 'global'
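Key derivation by example:
# {"pageType": "loras", ...}          -> "page:loras"
# {"scanner_type": "checkpoint", ...} -> "scanner:checkpoint"
# {"progress": 42}                    -> "global"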
async def broadcast_download_progress(self, download_id: str, data: Dict):
"""Send progress update to specific download client"""
# Store simplified progress data in memory (only progress percentage)
@@ -202,4 +236,5 @@ class WebSocketManager:
return str(uuid4())
# Global instance
ws_manager = WebSocketManager()
ws_manager = WebSocketManager()


@@ -1,11 +1,29 @@
from typing import Dict, Any
"""Progress callback implementations backed by the shared WebSocket manager."""
from typing import Any, Dict, Protocol
from .model_file_service import ProgressCallback
from .websocket_manager import ws_manager
class WebSocketProgressCallback(ProgressCallback):
"""WebSocket implementation of progress callback"""
class ProgressReporter(Protocol):
"""Protocol representing an async progress callback."""
async def on_progress(self, progress_data: Dict[str, Any]) -> None:
"""Send progress data via WebSocket"""
await ws_manager.broadcast_auto_organize_progress(progress_data)
"""Handle a progress update payload."""
class WebSocketProgressCallback(ProgressCallback):
"""WebSocket implementation of progress callback."""
async def on_progress(self, progress_data: Dict[str, Any]) -> None:
"""Send progress data via WebSocket."""
await ws_manager.broadcast_auto_organize_progress(progress_data)
class WebSocketBroadcastCallback:
"""Generic WebSocket progress callback broadcasting to all clients."""
async def on_progress(self, progress_data: Dict[str, Any]) -> None:
"""Send the provided payload to all connected clients."""
await ws_manager.broadcast(progress_data)

File diff suppressed because it is too large


@@ -4,6 +4,10 @@ import sys
import subprocess
from aiohttp import web
from ..services.settings_manager import settings
from ..utils.example_images_paths import (
get_model_folder,
get_model_relative_path,
)
from ..utils.constants import SUPPORTED_MEDIA_EXTENSIONS
logger = logging.getLogger(__name__)
@@ -41,8 +45,12 @@ class ExampleImagesFileManager:
}, status=400)
# Construct folder path for this model
model_folder = os.path.join(example_images_path, model_hash)
model_folder = os.path.abspath(model_folder) # Get absolute path
model_folder = get_model_folder(model_hash)
if not model_folder:
return web.json_response({
'success': False,
'error': 'Failed to resolve example images folder for this model.'
}, status=500)
# Path validation: ensure model_folder is under example_images_path
if not model_folder.startswith(os.path.abspath(example_images_path)):
@@ -109,8 +117,13 @@ class ExampleImagesFileManager:
}, status=400)
# Construct folder path for this model
model_folder = os.path.join(example_images_path, model_hash)
model_folder = get_model_folder(model_hash)
if not model_folder:
return web.json_response({
'success': False,
'error': 'Failed to resolve example images folder for this model'
}, status=500)
# Check if folder exists
if not os.path.exists(model_folder):
return web.json_response({
@@ -128,9 +141,10 @@ class ExampleImagesFileManager:
file_ext = os.path.splitext(file)[1].lower()
if (file_ext in SUPPORTED_MEDIA_EXTENSIONS['images'] or
file_ext in SUPPORTED_MEDIA_EXTENSIONS['videos']):
relative_path = get_model_relative_path(model_hash)
files.append({
'name': file,
'path': f'/example_images_static/{model_hash}/{file}',
'path': f'/example_images_static/{relative_path}/{file}',
'extension': file_ext,
'is_video': file_ext in SUPPORTED_MEDIA_EXTENSIONS['videos']
})
@@ -176,8 +190,13 @@ class ExampleImagesFileManager:
})
# Construct folder path for this model
model_folder = os.path.join(example_images_path, model_hash)
model_folder = get_model_folder(model_hash)
if not model_folder:
return web.json_response({
'has_images': False,
'error': 'Failed to resolve example images folder for this model'
})
# Check if folder exists
if not os.path.exists(model_folder) or not os.path.isdir(model_folder):
return web.json_response({


@@ -1,19 +1,39 @@
import logging
import os
import re
from ..utils.metadata_manager import MetadataManager
from ..utils.routes_common import ModelRouteUtils
from ..recipes.constants import GEN_PARAM_KEYS
from ..services.metadata_service import get_default_metadata_provider, get_metadata_provider
from ..services.metadata_sync_service import MetadataSyncService
from ..services.preview_asset_service import PreviewAssetService
from ..services.settings_manager import settings
from ..services.downloader import get_downloader
from ..utils.constants import SUPPORTED_MEDIA_EXTENSIONS
from ..utils.exif_utils import ExifUtils
from ..recipes.constants import GEN_PARAM_KEYS
from ..utils.metadata_manager import MetadataManager
logger = logging.getLogger(__name__)
_preview_service = PreviewAssetService(
metadata_manager=MetadataManager,
downloader_factory=get_downloader,
exif_utils=ExifUtils,
)
_metadata_sync_service = MetadataSyncService(
metadata_manager=MetadataManager,
preview_service=_preview_service,
settings=settings,
default_metadata_provider_factory=get_default_metadata_provider,
metadata_provider_selector=get_metadata_provider,
)
class MetadataUpdater:
"""Handles updating model metadata related to example images"""
@staticmethod
async def refresh_model_metadata(model_hash, model_name, scanner_type, scanner):
async def refresh_model_metadata(model_hash, model_name, scanner_type, scanner, progress: dict | None = None):
"""Refresh model metadata from CivitAI
Args:
@@ -25,8 +45,6 @@ class MetadataUpdater:
Returns:
bool: True if metadata was successfully refreshed, False otherwise
"""
from ..utils.example_images_download_manager import download_progress
try:
# Find the model in the scanner cache
cache = await scanner.get_cached_data()
@@ -47,17 +65,17 @@ class MetadataUpdater:
return False
# Track that we're refreshing this model
download_progress['refreshed_models'].add(model_hash)
if progress is not None:
progress['refreshed_models'].add(model_hash)
# Use ModelRouteUtils to refresh metadata
async def update_cache_func(old_path, new_path, metadata):
return await scanner.update_single_model_cache(old_path, new_path, metadata)
success, error = await ModelRouteUtils.fetch_and_update_model(
model_hash,
file_path,
model_data,
update_cache_func
success, error = await _metadata_sync_service.fetch_and_update_model(
sha256=model_hash,
file_path=file_path,
model_data=model_data,
update_cache_func=update_cache_func,
)
if success:
@@ -66,31 +84,46 @@ class MetadataUpdater:
else:
logger.warning(f"Failed to refresh metadata for {model_name}, {error}")
return False
except Exception as e:
error_msg = f"Error refreshing metadata for {model_name}: {str(e)}"
logger.error(error_msg, exc_info=True)
download_progress['errors'].append(error_msg)
download_progress['last_error'] = error_msg
if progress is not None:
progress['errors'].append(error_msg)
progress['last_error'] = error_msg
return False
@staticmethod
async def get_updated_model(model_hash, scanner):
"""Get updated model data
Args:
model_hash: SHA256 hash of the model
scanner: Scanner instance
Returns:
dict: Updated model data or None if not found
"""
"""Load the most recent metadata for a model identified by hash."""
cache = await scanner.get_cached_data()
target = None
for item in cache.raw_data:
if item.get('sha256') == model_hash:
return item
return None
target = item
break
if not target:
return None
file_path = target.get('file_path')
if not file_path:
return target
model_cls = getattr(scanner, 'model_class', None)
if model_cls is None:
metadata, should_skip = await MetadataManager.load_metadata(file_path)
else:
metadata, should_skip = await MetadataManager.load_metadata(file_path, model_cls)
if should_skip or metadata is None:
return target
rich_metadata = metadata.to_dict()
rich_metadata.setdefault('folder', target.get('folder', ''))
return rich_metadata
@staticmethod
async def update_metadata_from_local_examples(model_hash, model, scanner_type, scanner, model_dir):
"""Update model metadata with local example image information


@@ -5,6 +5,7 @@ import re
import json
from ..services.settings_manager import settings
from ..services.service_registry import ServiceRegistry
from ..utils.example_images_paths import iter_library_roots
from ..utils.metadata_manager import MetadataManager
from ..utils.example_images_processor import ExampleImagesProcessor
from ..utils.constants import SUPPORTED_MEDIA_EXTENSIONS
@@ -19,29 +20,35 @@ class ExampleImagesMigration:
@staticmethod
async def check_and_run_migrations():
"""Check if migrations are needed and run them in background"""
example_images_path = settings.get('example_images_path')
if not example_images_path or not os.path.exists(example_images_path):
root = settings.get('example_images_path')
if not root or not os.path.exists(root):
logger.debug("No example images path configured or path doesn't exist, skipping migrations")
return
# Check current version from progress file
current_version = 0
progress_file = os.path.join(example_images_path, '.download_progress.json')
if os.path.exists(progress_file):
try:
with open(progress_file, 'r', encoding='utf-8') as f:
progress_data = json.load(f)
current_version = progress_data.get('naming_version', 0)
except Exception as e:
logger.error(f"Failed to load progress file for migration check: {e}")
# If current version is less than target version, start migration
if current_version < CURRENT_NAMING_VERSION:
logger.info(f"Starting example images naming migration from v{current_version} to v{CURRENT_NAMING_VERSION}")
# Start migration in background task
asyncio.create_task(
ExampleImagesMigration.run_migrations(example_images_path, current_version, CURRENT_NAMING_VERSION)
)
for library_name, library_path in iter_library_roots():
if not library_path or not os.path.exists(library_path):
continue
current_version = 0
progress_file = os.path.join(library_path, '.download_progress.json')
if os.path.exists(progress_file):
try:
with open(progress_file, 'r', encoding='utf-8') as f:
progress_data = json.load(f)
current_version = progress_data.get('naming_version', 0)
except Exception as e:
logger.error(f"Failed to load progress file for migration check: {e}")
if current_version < CURRENT_NAMING_VERSION:
logger.info(
"Starting example images naming migration from v%s to v%s for library '%s'",
current_version,
CURRENT_NAMING_VERSION,
library_name,
)
asyncio.create_task(
ExampleImagesMigration.run_migrations(library_path, current_version, CURRENT_NAMING_VERSION)
)
@staticmethod
async def run_migrations(example_images_path, from_version, to_version):


@@ -0,0 +1,221 @@
"""Utility helpers for resolving example image storage paths."""
from __future__ import annotations
import logging
import os
import re
import shutil
from typing import Iterable, List, Optional, Tuple
from ..services.settings_manager import settings
_HEX_PATTERN = re.compile(r"[a-fA-F0-9]{64}")
logger = logging.getLogger(__name__)
def _get_configured_libraries() -> List[str]:
"""Return configured library names if multi-library support is enabled."""
libraries = settings.get("libraries")
if isinstance(libraries, dict) and libraries:
return list(libraries.keys())
return []
def get_example_images_root() -> str:
"""Return the root directory configured for example images."""
root = settings.get("example_images_path") or ""
return os.path.abspath(root) if root else ""
def uses_library_scoped_folders() -> bool:
"""Return True when example images should be separated per library."""
libraries = _get_configured_libraries()
return len(libraries) > 1
def sanitize_library_name(library_name: Optional[str]) -> str:
"""Return a filesystem safe library name."""
name = library_name or settings.get_active_library_name() or "default"
safe_name = re.sub(r"[^A-Za-z0-9_.-]", "_", name)
return safe_name or "default"
def get_library_root(library_name: Optional[str] = None) -> str:
"""Return the directory where a library's example images should live."""
root = get_example_images_root()
if not root:
return ""
if uses_library_scoped_folders():
return os.path.join(root, sanitize_library_name(library_name))
return root
def ensure_library_root_exists(library_name: Optional[str] = None) -> str:
"""Ensure the example image directory for a library exists and return it."""
library_root = get_library_root(library_name)
if library_root:
os.makedirs(library_root, exist_ok=True)
return library_root
def get_model_folder(model_hash: str, library_name: Optional[str] = None) -> str:
"""Return the folder path for a model's example images."""
if not model_hash:
return ""
library_root = ensure_library_root_exists(library_name)
if not library_root:
return ""
normalized_hash = (model_hash or "").lower()
resolved_folder = os.path.join(library_root, normalized_hash)
if uses_library_scoped_folders():
legacy_root = get_example_images_root()
legacy_folder = os.path.join(legacy_root, normalized_hash)
if os.path.exists(legacy_folder) and not os.path.exists(resolved_folder):
try:
os.makedirs(library_root, exist_ok=True)
shutil.move(legacy_folder, resolved_folder)
logger.info(
"Migrated legacy example images folder '%s' to '%s'", legacy_folder, resolved_folder
)
except OSError as exc:
logger.error(
"Failed to migrate example images from '%s' to '%s': %s",
legacy_folder,
resolved_folder,
exc,
)
return legacy_folder
return resolved_folder
class ExampleImagePathResolver:
"""Convenience wrapper exposing example image path helpers."""
@staticmethod
def get_model_folder(model_hash: str, library_name: Optional[str] = None) -> str:
"""Return the example image folder for a model, migrating legacy paths."""
return get_model_folder(model_hash, library_name)
@staticmethod
def get_library_root(library_name: Optional[str] = None) -> str:
"""Return the configured library root for example images."""
return get_library_root(library_name)
@staticmethod
def ensure_library_root_exists(library_name: Optional[str] = None) -> str:
"""Ensure the library root exists before writing files."""
return ensure_library_root_exists(library_name)
@staticmethod
def get_model_relative_path(model_hash: str, library_name: Optional[str] = None) -> str:
"""Return the relative path to a model folder from the static mount point."""
return get_model_relative_path(model_hash, library_name)
def get_model_relative_path(model_hash: str, library_name: Optional[str] = None) -> str:
"""Return the relative URL path from the static mount to a model folder."""
root = get_example_images_root()
folder = get_model_folder(model_hash, library_name)
if not root or not folder:
return ""
try:
relative = os.path.relpath(folder, root)
except ValueError:
return ""
return relative.replace("\\", "/")
def iter_library_roots() -> Iterable[Tuple[str, str]]:
"""Yield configured library names and their resolved filesystem roots."""
root = get_example_images_root()
if not root:
return []
libraries = _get_configured_libraries()
if uses_library_scoped_folders():
results: List[Tuple[str, str]] = []
if libraries:
for library in libraries:
results.append((library, get_library_root(library)))
else:
# Fall back to the active library to avoid skipping migrations/cleanup
active = settings.get_active_library_name() or "default"
results.append((active, get_library_root(active)))
return results
active = settings.get_active_library_name() or "default"
return [(active, root)]
def is_hash_folder(name: str) -> bool:
"""Return True if the provided name looks like a model hash folder."""
return bool(_HEX_PATTERN.fullmatch(name or ""))
def is_valid_example_images_root(folder_path: str) -> bool:
"""Check whether a folder looks like a dedicated example images root."""
try:
items = os.listdir(folder_path)
except OSError:
return False
for item in items:
item_path = os.path.join(folder_path, item)
if item == ".download_progress.json" and os.path.isfile(item_path):
continue
if os.path.isdir(item_path):
if is_hash_folder(item):
continue
if item == "_deleted":
# Allow cleanup staging folders
continue
# When multi-library mode is active we expect nested hash folders
if uses_library_scoped_folders():
if _library_folder_has_only_hash_dirs(item_path):
continue
return False
return True
def _library_folder_has_only_hash_dirs(path: str) -> bool:
"""Return True when a library subfolder only contains hash folders or metadata files."""
try:
for entry in os.listdir(path):
entry_path = os.path.join(path, entry)
if entry == ".download_progress.json" and os.path.isfile(entry_path):
continue
if entry == "_deleted" and os.path.isdir(entry_path):
continue
if not os.path.isdir(entry_path) or not is_hash_folder(entry):
return False
except OSError:
return False
return True
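Putting the helpers together, assuming example_images_path is "/data/examples":
# Single configured library:
#   get_library_root()           -> "/data/examples"
#   get_model_folder("<hash>")   -> "/data/examples/<hash>"
#   get_model_relative_path(..)  -> "<hash>"
# Two or more libraries (library-scoped mode):
#   get_library_root("studio")   -> "/data/examples/studio"
#   get_model_folder("<hash>")   -> "/data/examples/<active_library>/<hash>"
#   get_model_relative_path(..)  -> "<active_library>/<hash>"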


@@ -1,18 +1,26 @@
import logging
import os
import re
import tempfile
import random
import string
from aiohttp import web
from ..utils.constants import SUPPORTED_MEDIA_EXTENSIONS
from ..services.service_registry import ServiceRegistry
from ..services.settings_manager import settings
from ..utils.example_images_paths import get_model_folder, get_model_relative_path
from .example_images_metadata import MetadataUpdater
from ..utils.metadata_manager import MetadataManager
logger = logging.getLogger(__name__)
class ExampleImagesImportError(RuntimeError):
"""Base error for example image import operations."""
class ExampleImagesValidationError(ExampleImagesImportError):
"""Raised when input validation fails."""
class ExampleImagesProcessor:
"""Processes and manipulates example images"""
@@ -299,90 +307,29 @@ class ExampleImagesProcessor:
return False
@staticmethod
async def import_images(request):
"""
Import local example images
Accepts:
- multipart/form-data form with model_hash and files fields
or
- JSON request with model_hash and file_paths
Returns:
- Success status and list of imported files
"""
async def import_images(model_hash: str, files_to_import: list[str]):
"""Import local example images for a model."""
if not model_hash:
raise ExampleImagesValidationError('Missing model_hash parameter')
if not files_to_import:
raise ExampleImagesValidationError('No files provided to import')
try:
model_hash = None
files_to_import = []
temp_files_to_cleanup = []
# Check if it's a multipart form-data request (direct file upload)
if request.content_type and 'multipart/form-data' in request.content_type:
reader = await request.multipart()
# First get model_hash
field = await reader.next()
if field.name == 'model_hash':
model_hash = await field.text()
# Then process all files
while True:
field = await reader.next()
if field is None:
break
if field.name == 'files':
# Create a temporary file with appropriate suffix for type detection
file_name = field.filename
file_ext = os.path.splitext(file_name)[1].lower()
with tempfile.NamedTemporaryFile(suffix=file_ext, delete=False) as tmp_file:
temp_path = tmp_file.name
temp_files_to_cleanup.append(temp_path) # Track for cleanup
# Write chunks to the temporary file
while True:
chunk = await field.read_chunk()
if not chunk:
break
tmp_file.write(chunk)
# Add to the list of files to process
files_to_import.append(temp_path)
else:
# Parse JSON request (legacy method using file paths)
data = await request.json()
model_hash = data.get('model_hash')
files_to_import = data.get('file_paths', [])
if not model_hash:
return web.json_response({
'success': False,
'error': 'Missing model_hash parameter'
}, status=400)
if not files_to_import:
return web.json_response({
'success': False,
'error': 'No files provided to import'
}, status=400)
# Get example images path
example_images_path = settings.get('example_images_path')
if not example_images_path:
return web.json_response({
'success': False,
'error': 'No example images path configured'
}, status=400)
raise ExampleImagesValidationError('No example images path configured')
# Find the model and get current metadata
lora_scanner = await ServiceRegistry.get_lora_scanner()
checkpoint_scanner = await ServiceRegistry.get_checkpoint_scanner()
embedding_scanner = await ServiceRegistry.get_embedding_scanner()
model_data = None
scanner = None
# Check both scanners to find the model
for scan_obj in [lora_scanner, checkpoint_scanner, embedding_scanner]:
cache = await scan_obj.get_cached_data()
@@ -393,21 +340,22 @@ class ExampleImagesProcessor:
break
if model_data:
break
if not model_data:
return web.json_response({
'success': False,
'error': f"Model with hash {model_hash} not found in cache"
}, status=404)
raise ExampleImagesImportError(
f"Model with hash {model_hash} not found in cache"
)
# Create model folder
model_folder = os.path.join(example_images_path, model_hash)
model_folder = get_model_folder(model_hash)
if not model_folder:
raise ExampleImagesImportError('Failed to resolve model folder for example images')
os.makedirs(model_folder, exist_ok=True)
imported_files = []
errors = []
newly_imported_paths = []
# Process each file path
for file_path in files_to_import:
try:
@@ -415,68 +363,60 @@ class ExampleImagesProcessor:
if not os.path.isfile(file_path):
errors.append(f"File not found: {file_path}")
continue
# Check if file type is supported
file_ext = os.path.splitext(file_path)[1].lower()
if not (file_ext in SUPPORTED_MEDIA_EXTENSIONS['images'] or
if not (file_ext in SUPPORTED_MEDIA_EXTENSIONS['images'] or
file_ext in SUPPORTED_MEDIA_EXTENSIONS['videos']):
errors.append(f"Unsupported file type: {file_path}")
continue
# Generate new filename using short ID instead of UUID
short_id = ExampleImagesProcessor.generate_short_id()
new_filename = f"custom_{short_id}{file_ext}"
dest_path = os.path.join(model_folder, new_filename)
# Copy the file
import shutil
shutil.copy2(file_path, dest_path)
# Store both the dest_path and the short_id
newly_imported_paths.append((dest_path, short_id))
# Add to imported files list
imported_files.append({
'name': new_filename,
'path': f'/example_images_static/{model_hash}/{new_filename}',
'path': f'/example_images_static/{get_model_relative_path(model_hash)}/{new_filename}',
'extension': file_ext,
'is_video': file_ext in SUPPORTED_MEDIA_EXTENSIONS['videos']
})
except Exception as e:
errors.append(f"Error importing {file_path}: {str(e)}")
# Update metadata with new example images
regular_images, custom_images = await MetadataUpdater.update_metadata_after_import(
model_hash,
model_hash,
model_data,
scanner,
newly_imported_paths
)
return web.json_response({
return {
'success': len(imported_files) > 0,
'message': f'Successfully imported {len(imported_files)} files' +
'message': f'Successfully imported {len(imported_files)} files' +
(f' with {len(errors)} errors' if errors else ''),
'files': imported_files,
'errors': errors,
'regular_images': regular_images,
'custom_images': custom_images,
"model_file_path": model_data.get('file_path', ''),
})
}
except ExampleImagesImportError:
raise
except Exception as e:
logger.error(f"Failed to import example images: {e}", exc_info=True)
return web.json_response({
'success': False,
'error': str(e)
}, status=500)
finally:
# Clean up temporary files
for temp_file in temp_files_to_cleanup:
try:
os.remove(temp_file)
except Exception as e:
logger.error(f"Failed to remove temporary file {temp_file}: {e}")
raise ExampleImagesImportError(str(e)) from e
@staticmethod
async def delete_custom_image(request):
@@ -560,7 +500,12 @@ class ExampleImagesProcessor:
}, status=404)
# Find and delete the actual file
model_folder = os.path.join(example_images_path, model_hash)
model_folder = get_model_folder(model_hash)
if not model_folder:
return web.json_response({
'success': False,
'error': 'Failed to resolve model folder for example images'
}, status=500)
file_deleted = False
if os.path.exists(model_folder):


@@ -4,7 +4,7 @@ import logging
from typing import Optional
from io import BytesIO
import os
from PIL import Image
from PIL import Image, PngImagePlugin
logger = logging.getLogger(__name__)
@@ -86,9 +86,10 @@ class ExifUtils:
# For PNG, try to update parameters directly
if img_format == 'PNG':
# We'll save with parameters in the PNG info
info_dict = {'parameters': metadata}
img.save(image_path, format='PNG', pnginfo=info_dict)
# Use PngInfo instead of plain dictionary
png_info = PngImagePlugin.PngInfo()
png_info.add_text("parameters", metadata)
img.save(image_path, format='PNG', pnginfo=png_info)
return image_path
# For WebP format, use PIL's exif parameter directly
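The switch matters because Pillow's pnginfo argument expects a PngInfo object; a plain dict carries no chunk data to serialize, so the parameters text was never written. Round-trip sketch (file name illustrative):
from PIL import Image

with Image.open("example.png") as img:        # a file written by the code above
    parameters = img.info.get("parameters")   # the tEXt chunk added via PngInfo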

File diff suppressed because it is too large


@@ -0,0 +1,84 @@
"""Utilities for locating and migrating the LoRA Manager settings file."""
from __future__ import annotations
import logging
import os
import shutil
from typing import Optional
from platformdirs import user_config_dir
APP_NAME = "ComfyUI-LoRA-Manager"
_LOGGER = logging.getLogger(__name__)
def get_project_root() -> str:
"""Return the root directory of the project repository."""
return os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
def get_legacy_settings_path() -> str:
"""Return the legacy location of ``settings.json`` within the project tree."""
return os.path.join(get_project_root(), "settings.json")
def get_settings_dir(create: bool = True) -> str:
"""Return the user configuration directory for the application.
Args:
create: Whether to create the directory if it does not already exist.
Returns:
The absolute path to the user configuration directory.
"""
config_dir = user_config_dir(APP_NAME, appauthor=False)
if create:
os.makedirs(config_dir, exist_ok=True)
return config_dir
def get_settings_file_path(create_dir: bool = True) -> str:
"""Return the path to ``settings.json`` in the user configuration directory."""
return os.path.join(get_settings_dir(create=create_dir), "settings.json")
def ensure_settings_file(logger: Optional[logging.Logger] = None) -> str:
"""Ensure the settings file resides in the user configuration directory.
If a legacy ``settings.json`` is detected in the project root it is migrated to
the platform-specific user configuration folder. The caller receives the path
to the settings file irrespective of whether a migration was needed.
Args:
logger: Optional logger used for migration messages. Falls back to a
module level logger when omitted.
Returns:
The absolute path to ``settings.json`` in the user configuration folder.
"""
logger = logger or _LOGGER
target_path = get_settings_file_path(create_dir=True)
legacy_path = get_legacy_settings_path()
if os.path.exists(legacy_path) and not os.path.exists(target_path):
try:
os.makedirs(os.path.dirname(target_path), exist_ok=True)
shutil.move(legacy_path, target_path)
logger.info("Migrated settings.json to %s", target_path)
except Exception as exc: # pragma: no cover - defensive fallback path
logger.warning("Failed to move legacy settings.json: %s", exc)
try:
shutil.copy2(legacy_path, target_path)
logger.info("Copied legacy settings.json to %s", target_path)
except Exception as copy_exc: # pragma: no cover - defensive fallback path
logger.error("Could not migrate settings.json: %s", copy_exc)
return target_path
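A short usage sketch of the migration helper. The import path and the `folder_paths` key below are assumptions based on the surrounding diff, not confirmed repository details:

```python
import json

# Assumed import path; adjust to wherever settings_paths lives in the repo.
from py.utils.settings_paths import ensure_settings_file

# The first call migrates a legacy project-root settings.json; later calls are no-ops.
settings_path = ensure_settings_file()

try:
    with open(settings_path, "r", encoding="utf-8") as fh:
        settings = json.load(fh)
except FileNotFoundError:
    settings = {}  # fresh install: nothing has been written yet

print(settings_path, settings.get("folder_paths", {}))  # "folder_paths" is hypothetical
```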

View File

@@ -1,6 +1,5 @@
import os
import json
-import sys
import time
import asyncio
import logging
@@ -12,11 +11,21 @@ from ..config import config
from ..services.service_registry import ServiceRegistry

# Check if running in standalone mode
-standalone_mode = 'nodes' not in sys.modules
+standalone_mode = os.environ.get("LORA_MANAGER_STANDALONE", "0") == "1"

+# Define constants locally to avoid a dependency on conditional imports
+MODELS = "models"
+LORAS = "loras"

if not standalone_mode:
    from ..metadata_collector.metadata_registry import MetadataRegistry
-   from ..metadata_collector.constants import MODELS, LORAS
+   # Import the constants from metadata_collector for consistency; the local
+   # definitions above serve as fallbacks
+   try:
+       from ..metadata_collector.constants import MODELS as _MODELS, LORAS as _LORAS
+       MODELS = _MODELS
+       LORAS = _LORAS
+   except ImportError:
+       pass  # Use the local definitions
logger = logging.getLogger(__name__)
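Because the flag is evaluated at module import time, it has to be set before the module is first imported. A hedged sketch of the toggle in use (the module path below is illustrative, not the repository's real layout):

```python
import importlib
import os

# Set the flag before the first import so the conditional block is skipped.
os.environ["LORA_MANAGER_STANDALONE"] = "1"

mod = importlib.import_module("py.services.usage_stats")  # hypothetical module path
assert mod.standalone_mode is True
assert (mod.MODELS, mod.LORAS) == ("models", "loras")  # local fallback constants
```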

View File

@@ -189,6 +189,9 @@ def calculate_relative_path_for_model(model_data: Dict, model_type: str = 'lora'
    formatted_path = formatted_path.replace('{first_tag}', first_tag)
    formatted_path = formatted_path.replace('{author}', author)

+   if model_type == 'embedding':
+       formatted_path = formatted_path.replace(' ', '_')

    return formatted_path
def remove_empty_dirs(path):
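The template expansion above is plain string substitution. A standalone sketch of the behavior with illustrative inputs; the helper name is invented, and the placeholder set shown mirrors only what is visible in the hunk:

```python
def expand_path_template(template: str, first_tag: str, author: str,
                         model_type: str = 'lora') -> str:
    # Fill the placeholders, then (new in this diff) replace spaces with
    # underscores for embeddings.
    formatted_path = template.replace('{first_tag}', first_tag)
    formatted_path = formatted_path.replace('{author}', author)
    if model_type == 'embedding':
        formatted_path = formatted_path.replace(' ', '_')
    return formatted_path

print(expand_path_template('{first_tag}/{author}', 'style', 'PixelPawsAI'))
# -> style/PixelPawsAI
print(expand_path_template('{first_tag}/{author}', 'neg embeds', 'Pixel Paws', 'embedding'))
# -> neg_embeds/Pixel_Paws
```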

View File

@@ -1,7 +1,7 @@
[project]
name = "comfyui-lora-manager"
description = "Revolutionize your workflow with the ultimate LoRA companion for ComfyUI!"
-version = "0.9.4"
+version = "0.9.6"
license = {file = "LICENSE"}
dependencies = [
    "aiohttp",
@@ -13,7 +13,8 @@ dependencies = [
    "toml",
    "natsort",
    "GitPython",
-   "aiosqlite"
+   "aiosqlite",
+   "platformdirs"
]

[project.urls]

pytest.ini Normal file
View File

@@ -0,0 +1,11 @@
[pytest]
addopts = -v --import-mode=importlib
testpaths = tests
python_files = test_*.py
python_classes = Test*
python_functions = test_*
# Register async marker for coroutine-style tests
markers =
asyncio: execute test within asyncio event loop
# Skip problematic directories to avoid import conflicts
norecursedirs = .git .tox dist build *.egg __pycache__ py
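The `asyncio` marker registered above needs something to actually execute the coroutines; pytest alone will not await them. One way a conftest-level hook could back the marker, as a sketch rather than how this repository necessarily wires it up:

```python
# conftest.py - sketch of a runner for tests marked `asyncio`.
import asyncio
import inspect

import pytest

@pytest.hookimpl(tryfirst=True)
def pytest_pyfunc_call(pyfuncitem):
    """Run coroutine tests marked with @pytest.mark.asyncio in a fresh loop."""
    test_fn = pyfuncitem.obj
    if "asyncio" in pyfuncitem.keywords and inspect.iscoroutinefunction(test_fn):
        # Forward only the fixtures the coroutine actually declares.
        params = inspect.signature(test_fn).parameters
        kwargs = {name: pyfuncitem.funcargs[name] for name in params}
        asyncio.run(test_fn(**kwargs))
        return True  # tell pytest the test call was handled
    return None  # otherwise fall back to the default calling convention

# Example of a test the marker targets:
@pytest.mark.asyncio
async def test_event_loop_runs():
    await asyncio.sleep(0)
```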

View File

@@ -0,0 +1,110 @@
{
  "id": 1231067,
  "name": "Vivid Impressions Storybook Style",
  "description": "<h3 id=\"if-you'd-like-to-support-me-feel-free-to-visit-my-ko-fi-page.-please-share-your-images-using-the-&quot;+add-post&quot;-button-below.-it-supports-the-creators.-thanks!-nnfwkvfly\">If you'd like to support me, feel free to visit my <a target=\"_blank\" rel=\"ugc\" href=\"https://ko-fi.com/pixelpawsai\">Ko-Fi</a> page. ❤️<br /><br />Please share your images using the \"<span style=\"color:rgb(250, 82, 82)\">+add post</span>\" button below. It supports the creators. Thanks! 💕</h3><h3 id=\"if-you-like-my-lora-please-like-comment-or-donate-some-buzz.-much-appreciated!-vyeqok3go\">If you like my LoRA, please<span style=\"color:rgb(230, 73, 128)\"> </span><span style=\"color:rgb(250, 82, 82)\">like</span>, <span style=\"color:rgb(250, 82, 82)\">comment</span>, or <span style=\"color:#fa5252\">donate some Buzz</span>. Much appreciated! ❤️</h3><h3 id=\"-lo912t8rj\"></h3><h3 id=\"trigger-word:-ppstorybook-wlggllim2\"><strong><span style=\"color:rgb(253, 126, 20)\">Trigger word: </span></strong>ppstorybook</h3><h3 id=\"strength:-0.8-experiment-as-you-like-luvhks6za\"><strong><span style=\"color:rgb(253, 126, 20)\">Strength: </span></strong>0.8, experiment as you like</h3>",
  "allowNoCredit": true,
  "allowCommercialUse": [
    "Image",
    "RentCivit",
    "Rent",
    "Sell"
  ],
  "allowDerivatives": true,
  "allowDifferentLicense": true,
  "type": "LORA",
  "minor": false,
  "sfwOnly": false,
  "poi": false,
  "nsfw": false,
  "nsfwLevel": 1,
  "availability": "Public",
  "cosmetic": null,
  "supportsGeneration": true,
  "stats": {
    "downloadCount": 2183,
    "favoriteCount": 0,
    "thumbsUpCount": 416,
    "thumbsDownCount": 0,
    "commentCount": 12,
    "ratingCount": 0,
    "rating": 0,
    "tippedAmountCount": 360
  },
  "creator": {
    "username": "PixelPawsAI",
    "image": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/f3a1aa7c-0159-4dd8-884a-1e7ceb350f96/width=96/PixelPawsAI.jpeg"
  },
  "tags": [
    "style",
    "illustration",
    "storybook"
  ],
  "modelVersions": [
    {
      "id": 1387174,
      "index": 0,
      "name": "v1.0",
      "baseModel": "Flux.1 D",
      "baseModelType": "Standard",
      "createdAt": "2025-02-08T11:15:47.197Z",
      "publishedAt": "2025-02-08T11:29:04.487Z",
      "status": "Published",
      "availability": "Public",
      "nsfwLevel": 1,
      "trainedWords": [
        "ppstorybook"
      ],
      "covered": true,
      "stats": {
        "downloadCount": 2183,
        "ratingCount": 0,
        "rating": 0,
        "thumbsUpCount": 416,
        "thumbsDownCount": 0
      },
      "files": [
        {
          "id": 1289799,
          "sizeKB": 18829.1484375,
          "name": "pp-storybook_rank2_bf16.safetensors",
          "type": "Model",
          "pickleScanResult": "Success",
          "pickleScanMessage": "No Pickle imports",
          "virusScanResult": "Success",
          "virusScanMessage": null,
          "scannedAt": "2025-02-08T11:21:04.247Z",
          "metadata": {
            "format": "SafeTensor"
          },
          "hashes": {
            "AutoV1": "F414C813",
            "AutoV2": "9753338AB6",
            "SHA256": "9753338AB693CA82BF89ED77A5D1912879E40051463EC6E330FB9866CE798668",
            "CRC32": "A65AE7B3",
            "BLAKE3": "A5F8AB95AC2486345E4ACCAE541FF19D97ED53EFB0A7CC9226636975A0437591",
            "AutoV3": "34A22376739D"
          },
          "downloadUrl": "https://civitai.com/api/download/models/1387174",
          "primary": true
        }
      ],
      "images": [
        {
          "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/42b875cf-c62b-41fa-a349-383b7f074351/original=true/56547310.jpeg",
          "nsfwLevel": 1,
          "width": 832,
          "height": 1216,
          "hash": "U5IiO6s-4Vn+0~EO^5xa00VsL#IU_O?E7yWC",
          "type": "image",
          "minor": false,
          "poi": false,
          "hasMeta": true,
          "hasPositivePrompt": true,
          "onSite": false,
          "remixOfId": null
        }
      ],
      "downloadUrl": "https://civitai.com/api/download/models/1387174"
    }
  ]
}
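Fixtures like this back the new route and metadata test suites. A hedged sketch of how one might load it in a test; the fixture path and the exact assertions are assumptions, not taken from the repository's tests:

```python
import json
from pathlib import Path

# Hypothetical fixture location; the repository's tests may store it elsewhere.
FIXTURE = Path("tests/fixtures/civitai_model_1231067.json")

def test_fixture_shape():
    data = json.loads(FIXTURE.read_text(encoding="utf-8"))
    version = data["modelVersions"][0]
    primary = next(f for f in version["files"] if f["primary"])
    assert data["type"] == "LORA"
    assert version["trainedWords"] == ["ppstorybook"]
    assert primary["hashes"]["SHA256"].startswith("9753338A")
```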

Some files were not shown because too many files have changed in this diff.