mirror of
https://github.com/willmiao/ComfyUI-Lora-Manager.git
synced 2026-05-06 16:36:45 -03:00
Compare commits
47 Commits
misc-page
...
modal-rewo
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
26884630d3 | ||
|
|
66e9d77c67 | ||
|
|
5ffca15172 | ||
|
|
4d9115339b | ||
|
|
469f7a1829 | ||
|
|
d27e3c8126 | ||
|
|
7bc63d7631 | ||
|
|
1606a3ff46 | ||
|
|
b313f36be9 | ||
|
|
fa3625ff72 | ||
|
|
895d13dc96 | ||
|
|
b7e0821f66 | ||
|
|
36e3e62e70 | ||
|
|
7bcf4e4491 | ||
|
|
c12aefa82a | ||
|
|
990a3527e4 | ||
|
|
655d3cab71 | ||
|
|
358e658459 | ||
|
|
f28c32f2b1 | ||
|
|
f5dbd6b8e8 | ||
|
|
2c026a2646 | ||
|
|
bd83f7520e | ||
|
|
b9a4e7a09b | ||
|
|
c30e57ede8 | ||
|
|
0dba1b336d | ||
|
|
820afe9319 | ||
|
|
5a97f4bc75 | ||
|
|
94da404cc5 | ||
|
|
1da476d858 | ||
|
|
1daaff6bd4 | ||
|
|
e252e44403 | ||
|
|
778ad8abd2 | ||
|
|
68cf381b50 | ||
|
|
337f73e711 | ||
|
|
04ba966a6e | ||
|
|
71c8cf84e0 | ||
|
|
db1aec94e5 | ||
|
|
553e1868e1 | ||
|
|
938ceb49b2 | ||
|
|
c0f03b79a8 | ||
|
|
a492638133 | ||
|
|
e17d6c8ebf | ||
|
|
ffcfe5ea3e | ||
|
|
719e18adb6 | ||
|
|
92d471daf5 | ||
|
|
66babf9ee1 | ||
|
|
60df2df324 |
201
.agents/skills/lora-manager-e2e/SKILL.md
Normal file
201
.agents/skills/lora-manager-e2e/SKILL.md
Normal file
@@ -0,0 +1,201 @@
|
|||||||
|
---
|
||||||
|
name: lora-manager-e2e
|
||||||
|
description: End-to-end testing and validation for LoRa Manager features. Use when performing automated E2E validation of LoRa Manager standalone mode, including starting/restarting the server, using Chrome DevTools MCP to interact with the web UI at http://127.0.0.1:8188/loras, and verifying frontend-to-backend functionality. Covers workflow validation, UI interaction testing, and integration testing between the standalone Python backend and the browser frontend.
|
||||||
|
---
|
||||||
|
|
||||||
|
# LoRa Manager E2E Testing
|
||||||
|
|
||||||
|
This skill provides workflows and utilities for end-to-end testing of LoRa Manager using Chrome DevTools MCP.
|
||||||
|
|
||||||
|
## Prerequisites
|
||||||
|
|
||||||
|
- LoRa Manager project cloned and dependencies installed (`pip install -r requirements.txt`)
|
||||||
|
- Chrome browser available for debugging
|
||||||
|
- Chrome DevTools MCP connected
|
||||||
|
|
||||||
|
## Quick Start Workflow
|
||||||
|
|
||||||
|
### 1. Start LoRa Manager Standalone
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Use the provided script to start the server
|
||||||
|
python .agents/skills/lora-manager-e2e/scripts/start_server.py --port 8188
|
||||||
|
```
|
||||||
|
|
||||||
|
Or manually:
|
||||||
|
```bash
|
||||||
|
cd /home/miao/workspace/ComfyUI/custom_nodes/ComfyUI-Lora-Manager
|
||||||
|
python standalone.py --port 8188
|
||||||
|
```
|
||||||
|
|
||||||
|
Wait for server ready message before proceeding.
|
||||||
|
|
||||||
|
### 2. Open Chrome Debug Mode
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Chrome with remote debugging on port 9222
|
||||||
|
google-chrome --remote-debugging-port=9222 --user-data-dir=/tmp/chrome-lora-manager http://127.0.0.1:8188/loras
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Connect Chrome DevTools MCP
|
||||||
|
|
||||||
|
Ensure the MCP server is connected to Chrome at `http://localhost:9222`.
|
||||||
|
|
||||||
|
### 4. Navigate and Interact
|
||||||
|
|
||||||
|
Use Chrome DevTools MCP tools to:
|
||||||
|
- Take snapshots: `take_snapshot`
|
||||||
|
- Click elements: `click`
|
||||||
|
- Fill forms: `fill` or `fill_form`
|
||||||
|
- Evaluate scripts: `evaluate_script`
|
||||||
|
- Wait for elements: `wait_for`
|
||||||
|
|
||||||
|
## Common E2E Test Patterns
|
||||||
|
|
||||||
|
### Pattern: Full Page Load Verification
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Navigate to LoRA list page
|
||||||
|
navigate_page(type="url", url="http://127.0.0.1:8188/loras")
|
||||||
|
|
||||||
|
# Wait for page to load
|
||||||
|
wait_for(text="LoRAs", timeout=10000)
|
||||||
|
|
||||||
|
# Take snapshot to verify UI state
|
||||||
|
snapshot = take_snapshot()
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pattern: Restart Server for Configuration Changes
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Stop current server (if running)
|
||||||
|
# Start with new configuration
|
||||||
|
python .agents/skills/lora-manager-e2e/scripts/start_server.py --port 8188 --restart
|
||||||
|
|
||||||
|
# Wait and refresh browser
|
||||||
|
navigate_page(type="reload", ignoreCache=True)
|
||||||
|
wait_for(text="LoRAs", timeout=15000)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pattern: Verify Backend API via Frontend
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Execute script in browser to call backend API
|
||||||
|
result = evaluate_script(function="""
|
||||||
|
async () => {
|
||||||
|
const response = await fetch('/loras/api/list');
|
||||||
|
const data = await response.json();
|
||||||
|
return { count: data.length, firstItem: data[0]?.name };
|
||||||
|
}
|
||||||
|
""")
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pattern: Form Submission Flow
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Fill a form (e.g., search or filter)
|
||||||
|
fill_form(elements=[
|
||||||
|
{"uid": "search-input", "value": "character"},
|
||||||
|
])
|
||||||
|
|
||||||
|
# Click submit button
|
||||||
|
click(uid="search-button")
|
||||||
|
|
||||||
|
# Wait for results
|
||||||
|
wait_for(text="Results", timeout=5000)
|
||||||
|
|
||||||
|
# Verify results via snapshot
|
||||||
|
snapshot = take_snapshot()
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pattern: Modal Dialog Interaction
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Open modal (e.g., add LoRA)
|
||||||
|
click(uid="add-lora-button")
|
||||||
|
|
||||||
|
# Wait for modal to appear
|
||||||
|
wait_for(text="Add LoRA", timeout=3000)
|
||||||
|
|
||||||
|
# Fill modal form
|
||||||
|
fill_form(elements=[
|
||||||
|
{"uid": "lora-name", "value": "Test LoRA"},
|
||||||
|
{"uid": "lora-path", "value": "/path/to/lora.safetensors"},
|
||||||
|
])
|
||||||
|
|
||||||
|
# Submit
|
||||||
|
click(uid="modal-submit-button")
|
||||||
|
|
||||||
|
# Wait for success message or close
|
||||||
|
wait_for(text="Success", timeout=5000)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Available Scripts
|
||||||
|
|
||||||
|
### scripts/start_server.py
|
||||||
|
|
||||||
|
Starts or restarts the LoRa Manager standalone server.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
python scripts/start_server.py [--port PORT] [--restart] [--wait]
|
||||||
|
```
|
||||||
|
|
||||||
|
Options:
|
||||||
|
- `--port`: Server port (default: 8188)
|
||||||
|
- `--restart`: Kill existing server before starting
|
||||||
|
- `--wait`: Wait for server to be ready before exiting
|
||||||
|
|
||||||
|
### scripts/wait_for_server.py
|
||||||
|
|
||||||
|
Polls server until ready or timeout.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
python scripts/wait_for_server.py [--port PORT] [--timeout SECONDS]
|
||||||
|
```
|
||||||
|
|
||||||
|
## Test Scenarios Reference
|
||||||
|
|
||||||
|
See [references/test-scenarios.md](references/test-scenarios.md) for detailed test scenarios including:
|
||||||
|
- LoRA list display and filtering
|
||||||
|
- Model metadata editing
|
||||||
|
- Recipe creation and management
|
||||||
|
- Settings configuration
|
||||||
|
- Import/export functionality
|
||||||
|
|
||||||
|
## Network Request Verification
|
||||||
|
|
||||||
|
Use `list_network_requests` and `get_network_request` to verify API calls:
|
||||||
|
|
||||||
|
```python
|
||||||
|
# List recent XHR/fetch requests
|
||||||
|
requests = list_network_requests(resourceTypes=["xhr", "fetch"])
|
||||||
|
|
||||||
|
# Get details of specific request
|
||||||
|
details = get_network_request(reqid=123)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Console Message Monitoring
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Check for errors or warnings
|
||||||
|
messages = list_console_messages(types=["error", "warn"])
|
||||||
|
```
|
||||||
|
|
||||||
|
## Performance Testing
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Start performance trace
|
||||||
|
performance_start_trace(reload=True, autoStop=False)
|
||||||
|
|
||||||
|
# Perform actions...
|
||||||
|
|
||||||
|
# Stop and analyze
|
||||||
|
results = performance_stop_trace()
|
||||||
|
```
|
||||||
|
|
||||||
|
## Cleanup
|
||||||
|
|
||||||
|
Always ensure proper cleanup after tests:
|
||||||
|
1. Stop the standalone server
|
||||||
|
2. Close browser pages (keep at least one open)
|
||||||
|
3. Clear temporary data if needed
|
||||||
324
.agents/skills/lora-manager-e2e/references/mcp-cheatsheet.md
Normal file
324
.agents/skills/lora-manager-e2e/references/mcp-cheatsheet.md
Normal file
@@ -0,0 +1,324 @@
|
|||||||
|
# Chrome DevTools MCP Cheatsheet for LoRa Manager
|
||||||
|
|
||||||
|
Quick reference for common MCP commands used in LoRa Manager E2E testing.
|
||||||
|
|
||||||
|
## Navigation
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Navigate to LoRA list page
|
||||||
|
navigate_page(type="url", url="http://127.0.0.1:8188/loras")
|
||||||
|
|
||||||
|
# Reload page with cache clear
|
||||||
|
navigate_page(type="reload", ignoreCache=True)
|
||||||
|
|
||||||
|
# Go back/forward
|
||||||
|
navigate_page(type="back")
|
||||||
|
navigate_page(type="forward")
|
||||||
|
```
|
||||||
|
|
||||||
|
## Waiting
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Wait for text to appear
|
||||||
|
wait_for(text="LoRAs", timeout=10000)
|
||||||
|
|
||||||
|
# Wait for specific element (via evaluate_script)
|
||||||
|
evaluate_script(function="""
|
||||||
|
() => {
|
||||||
|
return new Promise((resolve) => {
|
||||||
|
const check = () => {
|
||||||
|
if (document.querySelector('.lora-card')) {
|
||||||
|
resolve(true);
|
||||||
|
} else {
|
||||||
|
setTimeout(check, 100);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
check();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
""")
|
||||||
|
```
|
||||||
|
|
||||||
|
## Taking Snapshots
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Full page snapshot
|
||||||
|
snapshot = take_snapshot()
|
||||||
|
|
||||||
|
# Verbose snapshot (more details)
|
||||||
|
snapshot = take_snapshot(verbose=True)
|
||||||
|
|
||||||
|
# Save to file
|
||||||
|
take_snapshot(filePath="test-snapshots/page-load.json")
|
||||||
|
```
|
||||||
|
|
||||||
|
## Element Interaction
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Click element
|
||||||
|
click(uid="element-uid-from-snapshot")
|
||||||
|
|
||||||
|
# Double click
|
||||||
|
click(uid="element-uid", dblClick=True)
|
||||||
|
|
||||||
|
# Fill input
|
||||||
|
fill(uid="search-input", value="test query")
|
||||||
|
|
||||||
|
# Fill multiple inputs
|
||||||
|
fill_form(elements=[
|
||||||
|
{"uid": "input-1", "value": "value 1"},
|
||||||
|
{"uid": "input-2", "value": "value 2"},
|
||||||
|
])
|
||||||
|
|
||||||
|
# Hover
|
||||||
|
hover(uid="lora-card-1")
|
||||||
|
|
||||||
|
# Upload file
|
||||||
|
upload_file(uid="file-input", filePath="/path/to/file.safetensors")
|
||||||
|
```
|
||||||
|
|
||||||
|
## Keyboard Input
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Press key
|
||||||
|
press_key(key="Enter")
|
||||||
|
press_key(key="Escape")
|
||||||
|
press_key(key="Tab")
|
||||||
|
|
||||||
|
# Keyboard shortcuts
|
||||||
|
press_key(key="Control+A") # Select all
|
||||||
|
press_key(key="Control+F") # Find
|
||||||
|
```
|
||||||
|
|
||||||
|
## JavaScript Evaluation
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Simple evaluation
|
||||||
|
result = evaluate_script(function="() => document.title")
|
||||||
|
|
||||||
|
# Async evaluation
|
||||||
|
result = evaluate_script(function="""
|
||||||
|
async () => {
|
||||||
|
const response = await fetch('/loras/api/list');
|
||||||
|
return await response.json();
|
||||||
|
}
|
||||||
|
""")
|
||||||
|
|
||||||
|
# Check element existence
|
||||||
|
exists = evaluate_script(function="""
|
||||||
|
() => document.querySelector('.lora-card') !== null
|
||||||
|
""")
|
||||||
|
|
||||||
|
# Get element count
|
||||||
|
count = evaluate_script(function="""
|
||||||
|
() => document.querySelectorAll('.lora-card').length
|
||||||
|
""")
|
||||||
|
```
|
||||||
|
|
||||||
|
## Network Monitoring
|
||||||
|
|
||||||
|
```python
|
||||||
|
# List all network requests
|
||||||
|
requests = list_network_requests()
|
||||||
|
|
||||||
|
# Filter by resource type
|
||||||
|
xhr_requests = list_network_requests(resourceTypes=["xhr", "fetch"])
|
||||||
|
|
||||||
|
# Get specific request details
|
||||||
|
details = get_network_request(reqid=123)
|
||||||
|
|
||||||
|
# Include preserved requests from previous navigations
|
||||||
|
all_requests = list_network_requests(includePreservedRequests=True)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Console Monitoring
|
||||||
|
|
||||||
|
```python
|
||||||
|
# List all console messages
|
||||||
|
messages = list_console_messages()
|
||||||
|
|
||||||
|
# Filter by type
|
||||||
|
errors = list_console_messages(types=["error", "warn"])
|
||||||
|
|
||||||
|
# Include preserved messages
|
||||||
|
all_messages = list_console_messages(includePreservedMessages=True)
|
||||||
|
|
||||||
|
# Get specific message
|
||||||
|
details = get_console_message(msgid=1)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Performance Testing
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Start trace with page reload
|
||||||
|
performance_start_trace(reload=True, autoStop=False)
|
||||||
|
|
||||||
|
# Start trace without reload
|
||||||
|
performance_start_trace(reload=False, autoStop=True, filePath="trace.json.gz")
|
||||||
|
|
||||||
|
# Stop trace
|
||||||
|
results = performance_stop_trace()
|
||||||
|
|
||||||
|
# Stop and save
|
||||||
|
performance_stop_trace(filePath="trace-results.json.gz")
|
||||||
|
|
||||||
|
# Analyze specific insight
|
||||||
|
insight = performance_analyze_insight(
|
||||||
|
insightSetId="results.insightSets[0].id",
|
||||||
|
insightName="LCPBreakdown"
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Page Management
|
||||||
|
|
||||||
|
```python
|
||||||
|
# List open pages
|
||||||
|
pages = list_pages()
|
||||||
|
|
||||||
|
# Select a page
|
||||||
|
select_page(pageId=0, bringToFront=True)
|
||||||
|
|
||||||
|
# Create new page
|
||||||
|
new_page(url="http://127.0.0.1:8188/loras")
|
||||||
|
|
||||||
|
# Close page (keep at least one open!)
|
||||||
|
close_page(pageId=1)
|
||||||
|
|
||||||
|
# Resize page
|
||||||
|
resize_page(width=1920, height=1080)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Screenshots
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Full page screenshot
|
||||||
|
take_screenshot(fullPage=True)
|
||||||
|
|
||||||
|
# Viewport screenshot
|
||||||
|
take_screenshot()
|
||||||
|
|
||||||
|
# Element screenshot
|
||||||
|
take_screenshot(uid="lora-card-1")
|
||||||
|
|
||||||
|
# Save to file
|
||||||
|
take_screenshot(filePath="screenshots/page.png", format="png")
|
||||||
|
|
||||||
|
# JPEG with quality
|
||||||
|
take_screenshot(filePath="screenshots/page.jpg", format="jpeg", quality=90)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Dialog Handling
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Accept dialog
|
||||||
|
handle_dialog(action="accept")
|
||||||
|
|
||||||
|
# Accept with text input
|
||||||
|
handle_dialog(action="accept", promptText="user input")
|
||||||
|
|
||||||
|
# Dismiss dialog
|
||||||
|
handle_dialog(action="dismiss")
|
||||||
|
```
|
||||||
|
|
||||||
|
## Device Emulation
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Mobile viewport
|
||||||
|
emulate(viewport={"width": 375, "height": 667, "isMobile": True, "hasTouch": True})
|
||||||
|
|
||||||
|
# Tablet viewport
|
||||||
|
emulate(viewport={"width": 768, "height": 1024, "isMobile": True, "hasTouch": True})
|
||||||
|
|
||||||
|
# Desktop viewport
|
||||||
|
emulate(viewport={"width": 1920, "height": 1080})
|
||||||
|
|
||||||
|
# Network throttling
|
||||||
|
emulate(networkConditions="Slow 3G")
|
||||||
|
emulate(networkConditions="Fast 4G")
|
||||||
|
|
||||||
|
# CPU throttling
|
||||||
|
emulate(cpuThrottlingRate=4) # 4x slowdown
|
||||||
|
|
||||||
|
# Geolocation
|
||||||
|
emulate(geolocation={"latitude": 37.7749, "longitude": -122.4194})
|
||||||
|
|
||||||
|
# User agent
|
||||||
|
emulate(userAgent="Mozilla/5.0 (Custom)")
|
||||||
|
|
||||||
|
# Reset emulation
|
||||||
|
emulate(viewport=None, networkConditions="No emulation", userAgent=None)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Drag and Drop
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Drag element to another
|
||||||
|
drag(from_uid="draggable-item", to_uid="drop-zone")
|
||||||
|
```
|
||||||
|
|
||||||
|
## Common LoRa Manager Test Patterns
|
||||||
|
|
||||||
|
### Verify LoRA Cards Loaded
|
||||||
|
|
||||||
|
```python
|
||||||
|
navigate_page(type="url", url="http://127.0.0.1:8188/loras")
|
||||||
|
wait_for(text="LoRAs", timeout=10000)
|
||||||
|
|
||||||
|
# Check if cards loaded
|
||||||
|
result = evaluate_script(function="""
|
||||||
|
() => {
|
||||||
|
const cards = document.querySelectorAll('.lora-card');
|
||||||
|
return {
|
||||||
|
count: cards.length,
|
||||||
|
hasData: cards.length > 0
|
||||||
|
};
|
||||||
|
}
|
||||||
|
""")
|
||||||
|
```
|
||||||
|
|
||||||
|
### Search and Verify Results
|
||||||
|
|
||||||
|
```python
|
||||||
|
fill(uid="search-input", value="character")
|
||||||
|
press_key(key="Enter")
|
||||||
|
wait_for(timeout=2000) # Wait for debounce
|
||||||
|
|
||||||
|
# Check results
|
||||||
|
result = evaluate_script(function="""
|
||||||
|
() => {
|
||||||
|
const cards = document.querySelectorAll('.lora-card');
|
||||||
|
const names = Array.from(cards).map(c => c.dataset.name || c.textContent);
|
||||||
|
return { count: cards.length, names };
|
||||||
|
}
|
||||||
|
""")
|
||||||
|
```
|
||||||
|
|
||||||
|
### Check API Response
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Trigger API call
|
||||||
|
evaluate_script(function="""
|
||||||
|
() => window.loraApiCallPromise = fetch('/loras/api/list').then(r => r.json())
|
||||||
|
""")
|
||||||
|
|
||||||
|
# Wait and get result
|
||||||
|
import time
|
||||||
|
time.sleep(1)
|
||||||
|
|
||||||
|
result = evaluate_script(function="""
|
||||||
|
async () => await window.loraApiCallPromise
|
||||||
|
""")
|
||||||
|
```
|
||||||
|
|
||||||
|
### Monitor Console for Errors
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Before test: clear console (navigate reloads)
|
||||||
|
navigate_page(type="reload")
|
||||||
|
|
||||||
|
# ... perform actions ...
|
||||||
|
|
||||||
|
# Check for errors
|
||||||
|
errors = list_console_messages(types=["error"])
|
||||||
|
assert len(errors) == 0, f"Console errors: {errors}"
|
||||||
|
```
|
||||||
272
.agents/skills/lora-manager-e2e/references/test-scenarios.md
Normal file
272
.agents/skills/lora-manager-e2e/references/test-scenarios.md
Normal file
@@ -0,0 +1,272 @@
|
|||||||
|
# LoRa Manager E2E Test Scenarios
|
||||||
|
|
||||||
|
This document provides detailed test scenarios for end-to-end validation of LoRa Manager features.
|
||||||
|
|
||||||
|
## Table of Contents
|
||||||
|
|
||||||
|
1. [LoRA List Page](#lora-list-page)
|
||||||
|
2. [Model Details](#model-details)
|
||||||
|
3. [Recipes](#recipes)
|
||||||
|
4. [Settings](#settings)
|
||||||
|
5. [Import/Export](#importexport)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## LoRA List Page
|
||||||
|
|
||||||
|
### Scenario: Page Load and Display
|
||||||
|
|
||||||
|
**Objective**: Verify the LoRA list page loads correctly and displays models.
|
||||||
|
|
||||||
|
**Steps**:
|
||||||
|
1. Navigate to `http://127.0.0.1:8188/loras`
|
||||||
|
2. Wait for page title "LoRAs" to appear
|
||||||
|
3. Take snapshot to verify:
|
||||||
|
- Header with "LoRAs" title is visible
|
||||||
|
- Search/filter controls are present
|
||||||
|
- Grid/list view toggle exists
|
||||||
|
- LoRA cards are displayed (if models exist)
|
||||||
|
- Pagination controls (if applicable)
|
||||||
|
|
||||||
|
**Expected Result**: Page loads without errors, UI elements are present.
|
||||||
|
|
||||||
|
### Scenario: Search Functionality
|
||||||
|
|
||||||
|
**Objective**: Verify search filters LoRA models correctly.
|
||||||
|
|
||||||
|
**Steps**:
|
||||||
|
1. Ensure at least one LoRA exists with known name (e.g., "test-character")
|
||||||
|
2. Navigate to LoRA list page
|
||||||
|
3. Enter search term in search box: "test"
|
||||||
|
4. Press Enter or click search button
|
||||||
|
5. Wait for results to update
|
||||||
|
|
||||||
|
**Expected Result**: Only LoRAs matching search term are displayed.
|
||||||
|
|
||||||
|
**Verification Script**:
|
||||||
|
```python
|
||||||
|
# After search, verify filtered results
|
||||||
|
evaluate_script(function="""
|
||||||
|
() => {
|
||||||
|
const cards = document.querySelectorAll('.lora-card');
|
||||||
|
const names = Array.from(cards).map(c => c.dataset.name);
|
||||||
|
return { count: cards.length, names };
|
||||||
|
}
|
||||||
|
""")
|
||||||
|
```
|
||||||
|
|
||||||
|
### Scenario: Filter by Tags
|
||||||
|
|
||||||
|
**Objective**: Verify tag filtering works correctly.
|
||||||
|
|
||||||
|
**Steps**:
|
||||||
|
1. Navigate to LoRA list page
|
||||||
|
2. Click on a tag (e.g., "character", "style")
|
||||||
|
3. Wait for filtered results
|
||||||
|
|
||||||
|
**Expected Result**: Only LoRAs with selected tag are displayed.
|
||||||
|
|
||||||
|
### Scenario: View Mode Toggle
|
||||||
|
|
||||||
|
**Objective**: Verify grid/list view toggle works.
|
||||||
|
|
||||||
|
**Steps**:
|
||||||
|
1. Navigate to LoRA list page
|
||||||
|
2. Click list view button
|
||||||
|
3. Verify list layout
|
||||||
|
4. Click grid view button
|
||||||
|
5. Verify grid layout
|
||||||
|
|
||||||
|
**Expected Result**: View mode changes correctly, layout updates.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Model Details
|
||||||
|
|
||||||
|
### Scenario: Open Model Details
|
||||||
|
|
||||||
|
**Objective**: Verify clicking a LoRA opens its details.
|
||||||
|
|
||||||
|
**Steps**:
|
||||||
|
1. Navigate to LoRA list page
|
||||||
|
2. Click on a LoRA card
|
||||||
|
3. Wait for details panel/modal to open
|
||||||
|
|
||||||
|
**Expected Result**: Details panel shows:
|
||||||
|
- Model name
|
||||||
|
- Preview image
|
||||||
|
- Metadata (trigger words, tags, etc.)
|
||||||
|
- Action buttons (edit, delete, etc.)
|
||||||
|
|
||||||
|
### Scenario: Edit Model Metadata
|
||||||
|
|
||||||
|
**Objective**: Verify metadata editing works end-to-end.
|
||||||
|
|
||||||
|
**Steps**:
|
||||||
|
1. Open a LoRA's details
|
||||||
|
2. Click "Edit" button
|
||||||
|
3. Modify trigger words field
|
||||||
|
4. Add/remove tags
|
||||||
|
5. Save changes
|
||||||
|
6. Refresh page
|
||||||
|
7. Reopen the same LoRA
|
||||||
|
|
||||||
|
**Expected Result**: Changes persist after refresh.
|
||||||
|
|
||||||
|
### Scenario: Delete Model
|
||||||
|
|
||||||
|
**Objective**: Verify model deletion works.
|
||||||
|
|
||||||
|
**Steps**:
|
||||||
|
1. Open a LoRA's details
|
||||||
|
2. Click "Delete" button
|
||||||
|
3. Confirm deletion in dialog
|
||||||
|
4. Wait for removal
|
||||||
|
|
||||||
|
**Expected Result**: Model removed from list, success message shown.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Recipes
|
||||||
|
|
||||||
|
### Scenario: Recipe List Display
|
||||||
|
|
||||||
|
**Objective**: Verify recipes page loads and displays recipes.
|
||||||
|
|
||||||
|
**Steps**:
|
||||||
|
1. Navigate to `http://127.0.0.1:8188/recipes`
|
||||||
|
2. Wait for "Recipes" title
|
||||||
|
3. Take snapshot
|
||||||
|
|
||||||
|
**Expected Result**: Recipe list displayed with cards/items.
|
||||||
|
|
||||||
|
### Scenario: Create New Recipe
|
||||||
|
|
||||||
|
**Objective**: Verify recipe creation workflow.
|
||||||
|
|
||||||
|
**Steps**:
|
||||||
|
1. Navigate to recipes page
|
||||||
|
2. Click "New Recipe" button
|
||||||
|
3. Fill recipe form:
|
||||||
|
- Name: "Test Recipe"
|
||||||
|
- Description: "E2E test recipe"
|
||||||
|
- Add LoRA models
|
||||||
|
4. Save recipe
|
||||||
|
5. Verify recipe appears in list
|
||||||
|
|
||||||
|
**Expected Result**: New recipe created and displayed.
|
||||||
|
|
||||||
|
### Scenario: Apply Recipe
|
||||||
|
|
||||||
|
**Objective**: Verify applying a recipe to ComfyUI.
|
||||||
|
|
||||||
|
**Steps**:
|
||||||
|
1. Open a recipe
|
||||||
|
2. Click "Apply" or "Load in ComfyUI"
|
||||||
|
3. Verify action completes
|
||||||
|
|
||||||
|
**Expected Result**: Recipe applied successfully.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Settings
|
||||||
|
|
||||||
|
### Scenario: Settings Page Load
|
||||||
|
|
||||||
|
**Objective**: Verify settings page displays correctly.
|
||||||
|
|
||||||
|
**Steps**:
|
||||||
|
1. Navigate to `http://127.0.0.1:8188/settings`
|
||||||
|
2. Wait for "Settings" title
|
||||||
|
3. Take snapshot
|
||||||
|
|
||||||
|
**Expected Result**: Settings form with various options displayed.
|
||||||
|
|
||||||
|
### Scenario: Change Setting and Restart
|
||||||
|
|
||||||
|
**Objective**: Verify settings persist after restart.
|
||||||
|
|
||||||
|
**Steps**:
|
||||||
|
1. Navigate to settings page
|
||||||
|
2. Change a setting (e.g., default view mode)
|
||||||
|
3. Save settings
|
||||||
|
4. Restart server: `python scripts/start_server.py --restart --wait`
|
||||||
|
5. Refresh browser page
|
||||||
|
6. Navigate to settings
|
||||||
|
|
||||||
|
**Expected Result**: Changed setting value persists.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Import/Export
|
||||||
|
|
||||||
|
### Scenario: Export Models List
|
||||||
|
|
||||||
|
**Objective**: Verify export functionality.
|
||||||
|
|
||||||
|
**Steps**:
|
||||||
|
1. Navigate to LoRA list
|
||||||
|
2. Click "Export" button
|
||||||
|
3. Select format (JSON/CSV)
|
||||||
|
4. Download file
|
||||||
|
|
||||||
|
**Expected Result**: File downloaded with correct data.
|
||||||
|
|
||||||
|
### Scenario: Import Models
|
||||||
|
|
||||||
|
**Objective**: Verify import functionality.
|
||||||
|
|
||||||
|
**Steps**:
|
||||||
|
1. Prepare import file
|
||||||
|
2. Navigate to import page
|
||||||
|
3. Upload file
|
||||||
|
4. Verify import results
|
||||||
|
|
||||||
|
**Expected Result**: Models imported successfully, confirmation shown.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## API Integration Tests
|
||||||
|
|
||||||
|
### Scenario: Verify API Endpoints
|
||||||
|
|
||||||
|
**Objective**: Verify backend API responds correctly.
|
||||||
|
|
||||||
|
**Test via browser console**:
|
||||||
|
```javascript
|
||||||
|
// List LoRAs
|
||||||
|
fetch('/loras/api/list').then(r => r.json()).then(console.log)
|
||||||
|
|
||||||
|
// Get LoRA details
|
||||||
|
fetch('/loras/api/detail/<id>').then(r => r.json()).then(console.log)
|
||||||
|
|
||||||
|
// Search LoRAs
|
||||||
|
fetch('/loras/api/search?q=test').then(r => r.json()).then(console.log)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Expected Result**: APIs return valid JSON with expected structure.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Console Error Monitoring
|
||||||
|
|
||||||
|
During all tests, monitor browser console for errors:
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Check for JavaScript errors
|
||||||
|
messages = list_console_messages(types=["error"])
|
||||||
|
assert len(messages) == 0, f"Console errors found: {messages}"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Network Request Verification
|
||||||
|
|
||||||
|
Verify key API calls are made:
|
||||||
|
|
||||||
|
```python
|
||||||
|
# List XHR requests
|
||||||
|
requests = list_network_requests(resourceTypes=["xhr", "fetch"])
|
||||||
|
|
||||||
|
# Look for specific endpoints
|
||||||
|
lora_list_requests = [r for r in requests if "/api/list" in r.get("url", "")]
|
||||||
|
assert len(lora_list_requests) > 0, "LoRA list API not called"
|
||||||
|
```
|
||||||
193
.agents/skills/lora-manager-e2e/scripts/example_e2e_test.py
Executable file
193
.agents/skills/lora-manager-e2e/scripts/example_e2e_test.py
Executable file
@@ -0,0 +1,193 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Example E2E test demonstrating LoRa Manager testing workflow.
|
||||||
|
|
||||||
|
This script shows how to:
|
||||||
|
1. Start the standalone server
|
||||||
|
2. Use Chrome DevTools MCP to interact with the UI
|
||||||
|
3. Verify functionality end-to-end
|
||||||
|
|
||||||
|
Note: This is a template. Actual execution requires Chrome DevTools MCP.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
|
||||||
|
|
||||||
|
def run_test():
|
||||||
|
"""Run example E2E test flow."""
|
||||||
|
|
||||||
|
print("=" * 60)
|
||||||
|
print("LoRa Manager E2E Test Example")
|
||||||
|
print("=" * 60)
|
||||||
|
|
||||||
|
# Step 1: Start server
|
||||||
|
print("\n[1/5] Starting LoRa Manager standalone server...")
|
||||||
|
result = subprocess.run(
|
||||||
|
[sys.executable, "start_server.py", "--port", "8188", "--wait", "--timeout", "30"],
|
||||||
|
capture_output=True,
|
||||||
|
text=True
|
||||||
|
)
|
||||||
|
if result.returncode != 0:
|
||||||
|
print(f"Failed to start server: {result.stderr}")
|
||||||
|
return 1
|
||||||
|
print("Server ready!")
|
||||||
|
|
||||||
|
# Step 2: Open Chrome (manual step - show command)
|
||||||
|
print("\n[2/5] Open Chrome with debug mode:")
|
||||||
|
print("google-chrome --remote-debugging-port=9222 --user-data-dir=/tmp/chrome-lora-manager http://127.0.0.1:8188/loras")
|
||||||
|
print("(In actual test, this would be automated via MCP)")
|
||||||
|
|
||||||
|
# Step 3: Navigate and verify page load
|
||||||
|
print("\n[3/5] Page Load Verification:")
|
||||||
|
print("""
|
||||||
|
MCP Commands to execute:
|
||||||
|
1. navigate_page(type="url", url="http://127.0.0.1:8188/loras")
|
||||||
|
2. wait_for(text="LoRAs", timeout=10000)
|
||||||
|
3. snapshot = take_snapshot()
|
||||||
|
""")
|
||||||
|
|
||||||
|
# Step 4: Test search functionality
|
||||||
|
print("\n[4/5] Search Functionality Test:")
|
||||||
|
print("""
|
||||||
|
MCP Commands to execute:
|
||||||
|
1. fill(uid="search-input", value="test")
|
||||||
|
2. press_key(key="Enter")
|
||||||
|
3. wait_for(text="Results", timeout=5000)
|
||||||
|
4. result = evaluate_script(function="""
|
||||||
|
() => {
|
||||||
|
const cards = document.querySelectorAll('.lora-card');
|
||||||
|
return { count: cards.length };
|
||||||
|
}
|
||||||
|
""")
|
||||||
|
""")
|
||||||
|
|
||||||
|
# Step 5: Verify API
|
||||||
|
print("\n[5/5] API Verification:")
|
||||||
|
print("""
|
||||||
|
MCP Commands to execute:
|
||||||
|
1. api_result = evaluate_script(function="""
|
||||||
|
async () => {
|
||||||
|
const response = await fetch('/loras/api/list');
|
||||||
|
const data = await response.json();
|
||||||
|
return { count: data.length, status: response.status };
|
||||||
|
}
|
||||||
|
""")
|
||||||
|
2. Verify api_result['status'] == 200
|
||||||
|
""")
|
||||||
|
|
||||||
|
print("\n" + "=" * 60)
|
||||||
|
print("Test flow completed!")
|
||||||
|
print("=" * 60)
|
||||||
|
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
def example_restart_flow():
|
||||||
|
"""Example: Testing configuration change that requires restart."""
|
||||||
|
|
||||||
|
print("\n" + "=" * 60)
|
||||||
|
print("Example: Server Restart Flow")
|
||||||
|
print("=" * 60)
|
||||||
|
|
||||||
|
print("""
|
||||||
|
Scenario: Change setting and verify after restart
|
||||||
|
|
||||||
|
Steps:
|
||||||
|
1. Navigate to settings page
|
||||||
|
- navigate_page(type="url", url="http://127.0.0.1:8188/settings")
|
||||||
|
|
||||||
|
2. Change a setting (e.g., theme)
|
||||||
|
- fill(uid="theme-select", value="dark")
|
||||||
|
- click(uid="save-settings-button")
|
||||||
|
|
||||||
|
3. Restart server
|
||||||
|
- subprocess.run([python, "start_server.py", "--restart", "--wait"])
|
||||||
|
|
||||||
|
4. Refresh browser
|
||||||
|
- navigate_page(type="reload", ignoreCache=True)
|
||||||
|
- wait_for(text="LoRAs", timeout=15000)
|
||||||
|
|
||||||
|
5. Verify setting persisted
|
||||||
|
- navigate_page(type="url", url="http://127.0.0.1:8188/settings")
|
||||||
|
- theme = evaluate_script(function="() => document.querySelector('#theme-select').value")
|
||||||
|
- assert theme == "dark"
|
||||||
|
""")
|
||||||
|
|
||||||
|
|
||||||
|
def example_modal_interaction():
|
||||||
|
"""Example: Testing modal dialog interaction."""
|
||||||
|
|
||||||
|
print("\n" + "=" * 60)
|
||||||
|
print("Example: Modal Dialog Interaction")
|
||||||
|
print("=" * 60)
|
||||||
|
|
||||||
|
print("""
|
||||||
|
Scenario: Add new LoRA via modal
|
||||||
|
|
||||||
|
Steps:
|
||||||
|
1. Open modal
|
||||||
|
- click(uid="add-lora-button")
|
||||||
|
- wait_for(text="Add LoRA", timeout=3000)
|
||||||
|
|
||||||
|
2. Fill form
|
||||||
|
- fill_form(elements=[
|
||||||
|
{"uid": "lora-name", "value": "Test Character"},
|
||||||
|
{"uid": "lora-path", "value": "/models/test.safetensors"},
|
||||||
|
])
|
||||||
|
|
||||||
|
3. Submit
|
||||||
|
- click(uid="modal-submit-button")
|
||||||
|
|
||||||
|
4. Verify success
|
||||||
|
- wait_for(text="Successfully added", timeout=5000)
|
||||||
|
- snapshot = take_snapshot()
|
||||||
|
""")
|
||||||
|
|
||||||
|
|
||||||
|
def example_network_monitoring():
|
||||||
|
"""Example: Network request monitoring."""
|
||||||
|
|
||||||
|
print("\n" + "=" * 60)
|
||||||
|
print("Example: Network Request Monitoring")
|
||||||
|
print("=" * 60)
|
||||||
|
|
||||||
|
print("""
|
||||||
|
Scenario: Verify API calls during user interaction
|
||||||
|
|
||||||
|
Steps:
|
||||||
|
1. Clear network log (implicit on navigation)
|
||||||
|
- navigate_page(type="url", url="http://127.0.0.1:8188/loras")
|
||||||
|
|
||||||
|
2. Perform action that triggers API call
|
||||||
|
- fill(uid="search-input", value="character")
|
||||||
|
- press_key(key="Enter")
|
||||||
|
|
||||||
|
3. List network requests
|
||||||
|
- requests = list_network_requests(resourceTypes=["xhr", "fetch"])
|
||||||
|
|
||||||
|
4. Find search API call
|
||||||
|
- search_requests = [r for r in requests if "/api/search" in r.get("url", "")]
|
||||||
|
- assert len(search_requests) > 0, "Search API was not called"
|
||||||
|
|
||||||
|
5. Get request details
|
||||||
|
- if search_requests:
|
||||||
|
details = get_network_request(reqid=search_requests[0]["reqid"])
|
||||||
|
- Verify request method, response status, etc.
|
||||||
|
""")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
print("LoRa Manager E2E Test Examples\n")
|
||||||
|
print("This script demonstrates E2E testing patterns.\n")
|
||||||
|
print("Note: Actual execution requires Chrome DevTools MCP connection.\n")
|
||||||
|
|
||||||
|
run_test()
|
||||||
|
example_restart_flow()
|
||||||
|
example_modal_interaction()
|
||||||
|
example_network_monitoring()
|
||||||
|
|
||||||
|
print("\n" + "=" * 60)
|
||||||
|
print("All examples shown!")
|
||||||
|
print("=" * 60)
|
||||||
169
.agents/skills/lora-manager-e2e/scripts/start_server.py
Executable file
169
.agents/skills/lora-manager-e2e/scripts/start_server.py
Executable file
@@ -0,0 +1,169 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Start or restart LoRa Manager standalone server for E2E testing.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
import socket
|
||||||
|
import signal
|
||||||
|
import os
|
||||||
|
|
||||||
|
|
||||||
|
def find_server_process(port: int) -> list[int]:
|
||||||
|
"""Find PIDs of processes listening on the given port."""
|
||||||
|
try:
|
||||||
|
result = subprocess.run(
|
||||||
|
["lsof", "-ti", f":{port}"],
|
||||||
|
capture_output=True,
|
||||||
|
text=True,
|
||||||
|
check=False
|
||||||
|
)
|
||||||
|
if result.returncode == 0 and result.stdout.strip():
|
||||||
|
return [int(pid) for pid in result.stdout.strip().split("\n") if pid]
|
||||||
|
except FileNotFoundError:
|
||||||
|
# lsof not available, try netstat
|
||||||
|
try:
|
||||||
|
result = subprocess.run(
|
||||||
|
["netstat", "-tlnp"],
|
||||||
|
capture_output=True,
|
||||||
|
text=True,
|
||||||
|
check=False
|
||||||
|
)
|
||||||
|
pids = []
|
||||||
|
for line in result.stdout.split("\n"):
|
||||||
|
if f":{port}" in line:
|
||||||
|
parts = line.split()
|
||||||
|
for part in parts:
|
||||||
|
if "/" in part:
|
||||||
|
try:
|
||||||
|
pid = int(part.split("/")[0])
|
||||||
|
pids.append(pid)
|
||||||
|
except ValueError:
|
||||||
|
pass
|
||||||
|
return pids
|
||||||
|
except FileNotFoundError:
|
||||||
|
pass
|
||||||
|
return []
|
||||||
|
|
||||||
|
|
||||||
|
def kill_server(port: int) -> None:
|
||||||
|
"""Kill processes using the specified port."""
|
||||||
|
pids = find_server_process(port)
|
||||||
|
for pid in pids:
|
||||||
|
try:
|
||||||
|
os.kill(pid, signal.SIGTERM)
|
||||||
|
print(f"Sent SIGTERM to process {pid}")
|
||||||
|
except ProcessLookupError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# Wait for processes to terminate
|
||||||
|
time.sleep(1)
|
||||||
|
|
||||||
|
# Force kill if still running
|
||||||
|
pids = find_server_process(port)
|
||||||
|
for pid in pids:
|
||||||
|
try:
|
||||||
|
os.kill(pid, signal.SIGKILL)
|
||||||
|
print(f"Sent SIGKILL to process {pid}")
|
||||||
|
except ProcessLookupError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def is_server_ready(port: int, timeout: float = 0.5) -> bool:
|
||||||
|
"""Check if server is accepting connections."""
|
||||||
|
try:
|
||||||
|
with socket.create_connection(("127.0.0.1", port), timeout=timeout):
|
||||||
|
return True
|
||||||
|
except (socket.timeout, ConnectionRefusedError, OSError):
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def wait_for_server(port: int, timeout: int = 30) -> bool:
|
||||||
|
"""Wait for server to become ready."""
|
||||||
|
start = time.time()
|
||||||
|
while time.time() - start < timeout:
|
||||||
|
if is_server_ready(port):
|
||||||
|
return True
|
||||||
|
time.sleep(0.5)
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> int:
|
||||||
|
parser = argparse.ArgumentParser(
|
||||||
|
description="Start LoRa Manager standalone server for E2E testing"
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--port",
|
||||||
|
type=int,
|
||||||
|
default=8188,
|
||||||
|
help="Server port (default: 8188)"
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--restart",
|
||||||
|
action="store_true",
|
||||||
|
help="Kill existing server before starting"
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--wait",
|
||||||
|
action="store_true",
|
||||||
|
help="Wait for server to be ready before exiting"
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--timeout",
|
||||||
|
type=int,
|
||||||
|
default=30,
|
||||||
|
help="Timeout for waiting (default: 30)"
|
||||||
|
)
|
||||||
|
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
# Get project root (parent of .agents directory)
|
||||||
|
script_dir = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
skill_dir = os.path.dirname(script_dir)
|
||||||
|
project_root = os.path.dirname(os.path.dirname(os.path.dirname(skill_dir)))
|
||||||
|
|
||||||
|
# Restart if requested
|
||||||
|
if args.restart:
|
||||||
|
print(f"Killing existing server on port {args.port}...")
|
||||||
|
kill_server(args.port)
|
||||||
|
time.sleep(1)
|
||||||
|
|
||||||
|
# Check if already running
|
||||||
|
if is_server_ready(args.port):
|
||||||
|
print(f"Server already running on port {args.port}")
|
||||||
|
return 0
|
||||||
|
|
||||||
|
# Start server
|
||||||
|
print(f"Starting LoRa Manager standalone server on port {args.port}...")
|
||||||
|
cmd = [sys.executable, "standalone.py", "--port", str(args.port)]
|
||||||
|
|
||||||
|
# Start in background
|
||||||
|
process = subprocess.Popen(
|
||||||
|
cmd,
|
||||||
|
cwd=project_root,
|
||||||
|
stdout=subprocess.PIPE,
|
||||||
|
stderr=subprocess.PIPE,
|
||||||
|
start_new_session=True
|
||||||
|
)
|
||||||
|
|
||||||
|
print(f"Server process started with PID {process.pid}")
|
||||||
|
|
||||||
|
# Wait for ready if requested
|
||||||
|
if args.wait:
|
||||||
|
print(f"Waiting for server to be ready (timeout: {args.timeout}s)...")
|
||||||
|
if wait_for_server(args.port, args.timeout):
|
||||||
|
print(f"Server ready at http://127.0.0.1:{args.port}/loras")
|
||||||
|
return 0
|
||||||
|
else:
|
||||||
|
print(f"Timeout waiting for server")
|
||||||
|
return 1
|
||||||
|
|
||||||
|
print(f"Server starting at http://127.0.0.1:{args.port}/loras")
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
sys.exit(main())
|
||||||
61
.agents/skills/lora-manager-e2e/scripts/wait_for_server.py
Executable file
61
.agents/skills/lora-manager-e2e/scripts/wait_for_server.py
Executable file
@@ -0,0 +1,61 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Wait for LoRa Manager server to become ready.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import socket
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
|
||||||
|
|
||||||
|
def is_server_ready(port: int, timeout: float = 0.5) -> bool:
|
||||||
|
"""Check if server is accepting connections."""
|
||||||
|
try:
|
||||||
|
with socket.create_connection(("127.0.0.1", port), timeout=timeout):
|
||||||
|
return True
|
||||||
|
except (socket.timeout, ConnectionRefusedError, OSError):
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def wait_for_server(port: int, timeout: int = 30) -> bool:
|
||||||
|
"""Wait for server to become ready."""
|
||||||
|
start = time.time()
|
||||||
|
while time.time() - start < timeout:
|
||||||
|
if is_server_ready(port):
|
||||||
|
return True
|
||||||
|
time.sleep(0.5)
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> int:
|
||||||
|
parser = argparse.ArgumentParser(
|
||||||
|
description="Wait for LoRa Manager server to become ready"
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--port",
|
||||||
|
type=int,
|
||||||
|
default=8188,
|
||||||
|
help="Server port (default: 8188)"
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--timeout",
|
||||||
|
type=int,
|
||||||
|
default=30,
|
||||||
|
help="Timeout in seconds (default: 30)"
|
||||||
|
)
|
||||||
|
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
print(f"Waiting for server on port {args.port} (timeout: {args.timeout}s)...")
|
||||||
|
|
||||||
|
if wait_for_server(args.port, args.timeout):
|
||||||
|
print(f"Server ready at http://127.0.0.1:{args.port}/loras")
|
||||||
|
return 0
|
||||||
|
else:
|
||||||
|
print(f"Timeout: Server not ready after {args.timeout}s")
|
||||||
|
return 1
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
sys.exit(main())
|
||||||
@@ -34,6 +34,15 @@ Enhance your Civitai browsing experience with our companion browser extension! S
|
|||||||
|
|
||||||
## Release Notes
|
## Release Notes
|
||||||
|
|
||||||
|
### v0.9.15
|
||||||
|
* **Filter Presets** - Save filter combinations as presets for quick switching and reapplication.
|
||||||
|
* **Bug Fixes** - Fixed various bugs for improved stability.
|
||||||
|
|
||||||
|
### v0.9.14
|
||||||
|
* **LoRA Cycler Node** - Introduced a new LoRA Cycler node that enables iteration through specified LoRAs with support for repeat count and pause iteration functionality. Refer to the new "Lora Cycler" template workflow for concrete example.
|
||||||
|
* **Enhanced Prompt Node with Tag Autocomplete** - Enhanced the Prompt node with comprehensive tag autocomplete based on merged Danbooru + e621 tags. Supports tag search and autocomplete functionality. Implemented a command system with shortcuts like `/char` or `/artist` for category-specific tag searching. Added `/ac` or `/noac` commands to quickly enable or disable autocomplete. Refer to the "Lora Manager Basic" template workflow in ComfyUI -> Templates -> ComfyUI-Lora-Manager for detailed tips.
|
||||||
|
* **Bug Fixes & Stability** - Addressed multiple bugs and improved overall stability.
|
||||||
|
|
||||||
### v0.9.12
|
### v0.9.12
|
||||||
* **LoRA Randomizer System** - Introduced a comprehensive LoRA randomization system featuring LoRA Pool and LoRA Randomizer nodes for flexible and dynamic generation workflows.
|
* **LoRA Randomizer System** - Introduced a comprehensive LoRA randomization system featuring LoRA Pool and LoRA Randomizer nodes for flexible and dynamic generation workflows.
|
||||||
* **LoRA Randomizer Template** - Refer to the new "LoRA Randomizer" template workflow for detailed examples of flexible randomization modes, lock & reuse options, and other features.
|
* **LoRA Randomizer Template** - Refer to the new "LoRA Randomizer" template workflow for detailed examples of flexible randomization modes, lock & reuse options, and other features.
|
||||||
|
|||||||
449
docs/plan/model-modal-redesign.md
Normal file
449
docs/plan/model-modal-redesign.md
Normal file
@@ -0,0 +1,449 @@
|
|||||||
|
# Model Modal UI/UX 重构计划
|
||||||
|
|
||||||
|
> **Status**: Phase 1 Complete ✓
|
||||||
|
> **Created**: 2026-02-06
|
||||||
|
> **Target**: v2.x Release
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 1. 项目概述
|
||||||
|
|
||||||
|
### 1.1 背景与问题
|
||||||
|
|
||||||
|
当前 Model Modal 存在以下 UX 问题:
|
||||||
|
|
||||||
|
1. **空间利用率低** - 固定 800px 宽度,大屏环境下大量留白
|
||||||
|
2. **Tab 切换繁琐** - 4 个 Tab(Examples/Description/Versions/Recipes)隐藏了重要信息
|
||||||
|
3. **Examples 浏览不便** - 需持续向下滚动,无快速导航
|
||||||
|
4. **添加自定义示例困难** - 需滚动到底部,操作路径长
|
||||||
|
|
||||||
|
### 1.2 设计目标
|
||||||
|
|
||||||
|
- **空间效率**: 利用 header 以下、sidebar 右侧的全部可用空间
|
||||||
|
- **浏览体验**: 类似 Midjourney 的沉浸式图片浏览
|
||||||
|
- **信息架构**: 关键元数据固定可见,次要信息可折叠
|
||||||
|
- **操作效率**: 直觉化的键盘导航,减少点击次数
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 2. 设计方案
|
||||||
|
|
||||||
|
### 2.1 布局架构: Split-View Overlay
|
||||||
|
|
||||||
|
```
|
||||||
|
┌──────────────────────────────────────────────────────────────────────┐
|
||||||
|
│ HEADER (保持现有) │
|
||||||
|
├──────────┬───────────────────────────────────────────────────────────┤
|
||||||
|
│ │ ┌───────────────────────────┬────────────────────────┐ │
|
||||||
|
│ FOLDER │ │ │ MODEL HEADER │ │
|
||||||
|
│ SIDEBAR │ │ EXAMPLES SHOWCASE │ ├─ Name │ │
|
||||||
|
│ (可折叠) │ │ │ ├─ Creator + Actions │ │
|
||||||
|
│ │ │ ┌─────────────────┐ │ ├─ Tags │ │
|
||||||
|
│ │ │ │ │ ├────────────────────────┤ │
|
||||||
|
│ │ │ │ MAIN IMAGE │ │ COMPACT METADATA │ │
|
||||||
|
│ │ │ │ (自适应高度) │ │ ├─ Ver | Base | Size │ │
|
||||||
|
│ │ │ │ │ │ ├─ Location │ │
|
||||||
|
│ │ │ └─────────────────┘ │ ├─ Usage Tips │ │
|
||||||
|
│ │ │ │ ├─ Trigger Words │ │
|
||||||
|
│ │ │ [PARAMS PREVIEW] │ ├─ Notes │ │
|
||||||
|
│ │ │ (Prompt + Copy) ├────────────────────────┤ │
|
||||||
|
│ │ │ │ CONTENT TABS │ │
|
||||||
|
│ │ │ ┌─────────────────┐ │ [Desc][Versions][Rec] │ │
|
||||||
|
│ │ │ │ THUMBNAIL RAIL │ │ │ │
|
||||||
|
│ │ │ │ [1][2][3][4][+]│ │ TAB CONTENT AREA │ │
|
||||||
|
│ │ │ └─────────────────┘ │ (Accordion / List) │ │
|
||||||
|
│ │ └───────────────────────────┴────────────────────────┘ │
|
||||||
|
└──────────┴───────────────────────────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
**尺寸规格**:
|
||||||
|
- Sidebar 展开: Left 60% | Right 40%
|
||||||
|
- Sidebar 折叠: Left 65% | Right 35%
|
||||||
|
- 最小宽度: 1200px (低于此值触发移动端适配)
|
||||||
|
|
||||||
|
### 2.2 左侧: Examples Showcase
|
||||||
|
|
||||||
|
#### 2.2.1 组件结构
|
||||||
|
|
||||||
|
| 组件 | 描述 | 优先级 |
|
||||||
|
|------|------|--------|
|
||||||
|
| Main Image | 自适应容器,保持原始比例,最大高度 70vh | P0 |
|
||||||
|
| Params Panel | 底部滑出面板,显示 Prompt/Negative/Params | P0 |
|
||||||
|
| Thumbnail Rail | 底部横向滚动条,支持点击跳转 | P0 |
|
||||||
|
| Add Button | Rail 最右侧 "+" 按钮,打开上传区 | P0 |
|
||||||
|
| Nav Arrows | 图片左右两侧悬停显示 | P1 |
|
||||||
|
|
||||||
|
#### 2.2.2 图片悬停操作
|
||||||
|
|
||||||
|
```
|
||||||
|
┌─────────────────┐
|
||||||
|
│ [👁] [📌] [🗑] │ ← 查看参数 | 设为预览 | 删除
|
||||||
|
│ │
|
||||||
|
│ IMAGE │
|
||||||
|
│ │
|
||||||
|
└─────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 2.2.3 键盘导航
|
||||||
|
|
||||||
|
| 按键 | 功能 | 说明 |
|
||||||
|
|------|------|------|
|
||||||
|
| ← | 上一个 Example | 循环(首张时到最后一张) |
|
||||||
|
| → | 下一个 Example | 循环(末张时到第一张) |
|
||||||
|
| I | Toggle Params Panel | 显示/隐藏图片参数 |
|
||||||
|
| C | Copy Prompt | 复制当前 Prompt 到剪贴板 |
|
||||||
|
|
||||||
|
### 2.3 右侧: Metadata + Content
|
||||||
|
|
||||||
|
#### 2.3.1 固定头部 (不可折叠)
|
||||||
|
|
||||||
|
```
|
||||||
|
┌────────────────────────┐
|
||||||
|
│ MODEL NAME [×] │
|
||||||
|
│ [👤 Creator] [🌐 Civ] │
|
||||||
|
│ [tag1] [tag2] [tag3] │
|
||||||
|
├────────────────────────┤
|
||||||
|
│ Ver: v1.0 Size: 96MB │
|
||||||
|
│ Base: SDXL │
|
||||||
|
│ 📁 /path/to/file │
|
||||||
|
├────────────────────────┤
|
||||||
|
│ USAGE TIPS [✏️] │
|
||||||
|
│ [strength: 0.8] [+] │
|
||||||
|
├────────────────────────┤
|
||||||
|
│ TRIGGER WORDS [✏️] │
|
||||||
|
│ [word1] [word2] [📋] │
|
||||||
|
├────────────────────────┤
|
||||||
|
│ NOTES [✏️] │
|
||||||
|
│ "Add your notes..." │
|
||||||
|
└────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 2.3.2 Tabs 设计
|
||||||
|
|
||||||
|
保留横向 Tab 切换,但优化内容展示:
|
||||||
|
|
||||||
|
| Tab | 内容 | 交互方式 |
|
||||||
|
|-----|------|----------|
|
||||||
|
| Description | About this version + Model Description | Accordion 折叠 |
|
||||||
|
| Versions | 版本列表卡片 | 完整列表视图 |
|
||||||
|
| Recipes | Recipe 卡片网格 | 网格布局 |
|
||||||
|
|
||||||
|
**Accordion 行为**:
|
||||||
|
- 手风琴模式:同时只能展开一个 section
|
||||||
|
- 默认:About this version 展开,Description 折叠
|
||||||
|
- 动画:300ms ease-out
|
||||||
|
|
||||||
|
### 2.4 全局导航
|
||||||
|
|
||||||
|
#### 2.4.1 Model 切换
|
||||||
|
|
||||||
|
| 按键 | 功能 |
|
||||||
|
|------|------|
|
||||||
|
| ↑ | 上一个 Model |
|
||||||
|
| ↓ | 下一个 Model |
|
||||||
|
|
||||||
|
**切换动画**:
|
||||||
|
1. 当前 Modal 淡出 (150ms)
|
||||||
|
2. 加载新 Model 数据
|
||||||
|
3. 新 Modal 淡入 (150ms)
|
||||||
|
4. 保持当前 Tab 状态(不重置到默认)
|
||||||
|
|
||||||
|
#### 2.4.2 首次使用提示
|
||||||
|
|
||||||
|
Modal 首次打开时,顶部显示提示条:
|
||||||
|
```
|
||||||
|
┌─────────────────────────────────────────────────────────────┐
|
||||||
|
│ 💡 Tip: ↑↓ 切换模型 | ←→ 浏览示例 | I 查看参数 | ESC 关闭 │
|
||||||
|
└─────────────────────────────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
- 3 秒后自动淡出
|
||||||
|
- 提供 "不再显示" 选项
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 3. 技术实现
|
||||||
|
|
||||||
|
### 3.1 文件结构变更
|
||||||
|
|
||||||
|
```
|
||||||
|
static/
|
||||||
|
├── js/
|
||||||
|
│ └── components/
|
||||||
|
│ └── model-modal/ # 新目录
|
||||||
|
│ ├── index.js # 主入口
|
||||||
|
│ ├── ModelModal.js # Modal 容器
|
||||||
|
│ ├── ExampleShowcase.js # 左侧展示
|
||||||
|
│ ├── ThumbnailRail.js # 缩略图导航
|
||||||
|
│ ├── MetadataPanel.js # 右侧元数据
|
||||||
|
│ ├── ContentTabs.js # Tabs 容器
|
||||||
|
│ └── accordions/ # Accordion 组件
|
||||||
|
│ ├── DescriptionAccordion.js
|
||||||
|
│ └── VersionsList.js
|
||||||
|
├── css/
|
||||||
|
│ └── components/
|
||||||
|
│ └── model-modal/ # 新目录
|
||||||
|
│ ├── modal-overlay.css
|
||||||
|
│ ├── showcase.css
|
||||||
|
│ ├── thumbnail-rail.css
|
||||||
|
│ ├── metadata.css
|
||||||
|
│ └── tabs.css
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3.2 核心 CSS 架构
|
||||||
|
|
||||||
|
```css
|
||||||
|
/* modal-overlay.css */
|
||||||
|
.model-overlay {
|
||||||
|
position: fixed;
|
||||||
|
top: var(--header-height);
|
||||||
|
left: var(--sidebar-width, 250px);
|
||||||
|
right: 0;
|
||||||
|
bottom: 0;
|
||||||
|
z-index: var(--z-modal);
|
||||||
|
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: 1.2fr 0.8fr;
|
||||||
|
gap: 0;
|
||||||
|
|
||||||
|
background: var(--bg-color);
|
||||||
|
animation: modalSlideIn 0.2s ease-out;
|
||||||
|
}
|
||||||
|
|
||||||
|
.model-overlay.sidebar-collapsed {
|
||||||
|
left: var(--sidebar-collapsed-width, 60px);
|
||||||
|
grid-template-columns: 1.3fr 0.7fr;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* 移动端适配 */
|
||||||
|
@media (max-width: 768px) {
|
||||||
|
.model-overlay {
|
||||||
|
left: 0;
|
||||||
|
grid-template-columns: 1fr;
|
||||||
|
grid-template-rows: auto 1fr;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3.3 响应式断点
|
||||||
|
|
||||||
|
| 断点 | 布局 | 说明 |
|
||||||
|
|------|------|------|
|
||||||
|
| > 1400px | Split 60/40 | 大屏优化 |
|
||||||
|
| 1200-1400px | Split 50/50 | 标准桌面 |
|
||||||
|
| 768-1200px | Split 50/50 | 小屏桌面/平板 |
|
||||||
|
| < 768px | Stack | 移动端:Examples 在上,Metadata 在下 |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 4. 实施阶段
|
||||||
|
|
||||||
|
### Phase 1: 核心重构 (预计 2-3 周)
|
||||||
|
|
||||||
|
**目标**: MVP 可用,基础功能完整
|
||||||
|
|
||||||
|
**任务清单**:
|
||||||
|
|
||||||
|
- [ ] 创建新的文件结构和基础组件
|
||||||
|
- [ ] 实现 Split-View Overlay 布局
|
||||||
|
- [ ] CSS Grid 布局系统
|
||||||
|
- [ ] Sidebar 状态联动
|
||||||
|
- [ ] 响应式断点处理
|
||||||
|
- [ ] 迁移左侧 Examples 区域
|
||||||
|
- [ ] Main Image 自适应容器
|
||||||
|
- [ ] Thumbnail Rail 组件
|
||||||
|
- [ ] Params Panel 滑出动画
|
||||||
|
- [ ] 实现新的快捷键系统
|
||||||
|
- [ ] ↑↓ 切换 Model
|
||||||
|
- [ ] ←→ 切换 Example
|
||||||
|
- [ ] I/C/ESC 功能键
|
||||||
|
- [ ] 移除旧 Modal 的 max-width 限制
|
||||||
|
- [ ] 基础动画过渡
|
||||||
|
|
||||||
|
**验收标准**:
|
||||||
|
- [ ] 新布局在各种屏幕尺寸下正常显示
|
||||||
|
- [ ] 键盘导航正常工作
|
||||||
|
- [ ] 无阻塞性 Bug
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Phase 2: 体验优化 (预计 1-2 周)
|
||||||
|
|
||||||
|
**目标**: 信息架构优化,交互细节完善
|
||||||
|
|
||||||
|
**任务清单**:
|
||||||
|
|
||||||
|
- [ ] Accordion 组件实现
|
||||||
|
- [ ] Description Tab 的折叠面板
|
||||||
|
- [ ] 手风琴交互逻辑
|
||||||
|
- [ ] 动画优化
|
||||||
|
- [ ] 右侧 Metadata 区域固定化
|
||||||
|
- [ ] 滚动行为优化
|
||||||
|
- [ ] 编辑功能迁移
|
||||||
|
- [ ] Example 添加流程优化
|
||||||
|
- [ ] Rail 上的 "+" 按钮
|
||||||
|
- [ ] Inline Upload Area
|
||||||
|
- [ ] 拖拽上传支持
|
||||||
|
- [ ] Model 切换动画优化
|
||||||
|
- [ ] 淡入淡出效果
|
||||||
|
- [ ] 加载状态指示
|
||||||
|
- [ ] 首次使用提示
|
||||||
|
|
||||||
|
**验收标准**:
|
||||||
|
- [ ] Accordion 交互流畅
|
||||||
|
- [ ] 添加 Example 操作路径 < 2 步
|
||||||
|
- [ ] Model 切换视觉反馈清晰
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Phase 3: 功能完整化 (预计 1-2 周)
|
||||||
|
|
||||||
|
**目标**: 所有现有功能迁移完成
|
||||||
|
|
||||||
|
**任务清单**:
|
||||||
|
|
||||||
|
- [ ] Versions Tab 完整实现
|
||||||
|
- [ ] 版本列表卡片
|
||||||
|
- [ ] 下载/忽略/删除操作
|
||||||
|
- [ ] 更新状态 Badge
|
||||||
|
- [ ] Recipes Tab 完整实现
|
||||||
|
- [ ] Recipe 卡片网格
|
||||||
|
- [ ] 复制/应用操作
|
||||||
|
- [ ] Tab 状态保持
|
||||||
|
- [ ] 切换 Model 时保持当前 Tab
|
||||||
|
- [ ] Tab 内容滚动位置记忆
|
||||||
|
- [ ] 所有编辑功能迁移
|
||||||
|
- [ ] Model Name 编辑
|
||||||
|
- [ ] Base Model 编辑
|
||||||
|
- [ ] File Name 编辑
|
||||||
|
- [ ] Tags 编辑
|
||||||
|
- [ ] Usage Tips 编辑
|
||||||
|
- [ ] Notes 编辑
|
||||||
|
|
||||||
|
**验收标准**:
|
||||||
|
- [ ] 所有现有功能可用
|
||||||
|
- [ ] 单元测试覆盖率 > 80%
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Phase 4: 打磨与优化 (预计 1 周)
|
||||||
|
|
||||||
|
**目标**: 性能优化,边缘 case 处理
|
||||||
|
|
||||||
|
**任务清单**:
|
||||||
|
|
||||||
|
- [ ] 移动端适配完善
|
||||||
|
- [ ] Stack 布局优化
|
||||||
|
- [ ] 触摸手势支持(滑动切换)
|
||||||
|
- [ ] 性能优化
|
||||||
|
- [ ] 图片懒加载优化
|
||||||
|
- [ ] 虚拟滚动(大量 Examples 时)
|
||||||
|
- [ ] 减少重渲染
|
||||||
|
- [ ] 无障碍支持
|
||||||
|
- [ ] ARIA 标签
|
||||||
|
- [ ] 键盘导航焦点管理
|
||||||
|
- [ ] 屏幕阅读器测试
|
||||||
|
- [ ] 动画性能优化
|
||||||
|
- [ ] will-change 优化
|
||||||
|
- [ ] 减少 layout thrashing
|
||||||
|
|
||||||
|
**验收标准**:
|
||||||
|
- [ ] Lighthouse Performance > 90
|
||||||
|
- [ ] 无障碍检查无严重问题
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Phase 5: 发布准备 (预计 3-5 天)
|
||||||
|
|
||||||
|
**目标**: 稳定版本,文档完整
|
||||||
|
|
||||||
|
**任务清单**:
|
||||||
|
|
||||||
|
- [ ] Bug 修复
|
||||||
|
- [ ] 用户测试
|
||||||
|
- [ ] 更新文档
|
||||||
|
- [ ] README 更新
|
||||||
|
- [ ] 快捷键说明
|
||||||
|
- [ ] 截图/GIF 演示
|
||||||
|
- [ ] 发布说明
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 5. 风险与应对
|
||||||
|
|
||||||
|
| 风险 | 影响 | 应对策略 |
|
||||||
|
|------|------|----------|
|
||||||
|
| 用户不适应新布局 | 中 | 提供设置选项,允许切换回旧版(临时) |
|
||||||
|
| 性能问题(大量 Examples) | 高 | Phase 4 重点优化,必要时虚拟滚动 |
|
||||||
|
| 移动端体验不佳 | 中 | 单独设计移动端布局,非简单缩放 |
|
||||||
|
| 与现有扩展冲突 | 低 | 充分的回归测试 |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 6. 关联文件
|
||||||
|
|
||||||
|
### 6.1 需修改的现有文件
|
||||||
|
|
||||||
|
```
|
||||||
|
static/js/components/shared/ModelModal.js # 完全重构
|
||||||
|
static/js/components/shared/showcase/ # 迁移至新目录
|
||||||
|
static/css/components/lora-modal/ # 样式重写
|
||||||
|
static/css/components/modal/_base.css # Overlay 样式调整
|
||||||
|
```
|
||||||
|
|
||||||
|
### 6.2 参考资源
|
||||||
|
|
||||||
|
- [Midjourney Explore](https://www.midjourney.com/explore) - 交互参考
|
||||||
|
- [Pinterest Pin View](https://www.pinterest.com) - 布局参考
|
||||||
|
- [AGENTS.md](/AGENTS.md) - 项目代码规范
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 7. Checklist
|
||||||
|
|
||||||
|
### 7.1 启动前
|
||||||
|
|
||||||
|
- [ ] 创建 feature branch: `feature/model-modal-redesign`
|
||||||
|
- [ ] 设置开发环境
|
||||||
|
- [ ] 准备测试数据集(多种 Model 类型)
|
||||||
|
|
||||||
|
### 7.2 每个 Phase 完成时
|
||||||
|
|
||||||
|
- [ ] 代码审查
|
||||||
|
- [ ] 功能测试
|
||||||
|
- [ ] 更新本文档状态
|
||||||
|
|
||||||
|
### 7.3 发布前
|
||||||
|
|
||||||
|
- [ ] 完整回归测试
|
||||||
|
- [ ] 更新 CHANGELOG
|
||||||
|
- [ ] 更新版本号
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 8. 附录
|
||||||
|
|
||||||
|
### 8.1 命名规范
|
||||||
|
|
||||||
|
| 类型 | 规范 | 示例 |
|
||||||
|
|------|------|------|
|
||||||
|
| 文件 | kebab-case | `thumbnail-rail.js` |
|
||||||
|
| 组件 | PascalCase | `ThumbnailRail` |
|
||||||
|
| CSS 类 | BEM | `.thumbnail-rail__item--active` |
|
||||||
|
| 变量 | camelCase | `currentExampleIndex` |
|
||||||
|
|
||||||
|
### 8.2 颜色规范
|
||||||
|
|
||||||
|
使用现有 CSS 变量,不引入新颜色:
|
||||||
|
|
||||||
|
```css
|
||||||
|
--lora-accent: #4299e1;
|
||||||
|
--lora-accent-l: 60%;
|
||||||
|
--lora-accent-c: 0.2;
|
||||||
|
--lora-accent-h: 250;
|
||||||
|
--lora-surface: var(--card-bg);
|
||||||
|
--lora-border: var(--border-color);
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
*Last Updated: 2026-02-06*
|
||||||
File diff suppressed because one or more lines are too long
@@ -9,9 +9,9 @@
|
|||||||
"back": "Zurück",
|
"back": "Zurück",
|
||||||
"next": "Weiter",
|
"next": "Weiter",
|
||||||
"backToTop": "Nach oben",
|
"backToTop": "Nach oben",
|
||||||
"add": "Hinzufügen",
|
|
||||||
"settings": "Einstellungen",
|
"settings": "Einstellungen",
|
||||||
"help": "Hilfe"
|
"help": "Hilfe",
|
||||||
|
"add": "Hinzufügen"
|
||||||
},
|
},
|
||||||
"status": {
|
"status": {
|
||||||
"loading": "Wird geladen...",
|
"loading": "Wird geladen...",
|
||||||
@@ -223,7 +223,11 @@
|
|||||||
"noCreditRequired": "Kein Credit erforderlich",
|
"noCreditRequired": "Kein Credit erforderlich",
|
||||||
"allowSellingGeneratedContent": "Verkauf erlaubt",
|
"allowSellingGeneratedContent": "Verkauf erlaubt",
|
||||||
"noTags": "Keine Tags",
|
"noTags": "Keine Tags",
|
||||||
"clearAll": "Alle Filter löschen"
|
"clearAll": "Alle Filter löschen",
|
||||||
|
"any": "Beliebig",
|
||||||
|
"all": "Alle",
|
||||||
|
"tagLogicAny": "Jedes Tag abgleichen (ODER)",
|
||||||
|
"tagLogicAll": "Alle Tags abgleichen (UND)"
|
||||||
},
|
},
|
||||||
"theme": {
|
"theme": {
|
||||||
"toggle": "Theme wechseln",
|
"toggle": "Theme wechseln",
|
||||||
@@ -1572,6 +1576,20 @@
|
|||||||
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
||||||
"supportCta": "Support on Ko-fi",
|
"supportCta": "Support on Ko-fi",
|
||||||
"learnMore": "LM Civitai Extension Tutorial"
|
"learnMore": "LM Civitai Extension Tutorial"
|
||||||
|
},
|
||||||
|
"cacheHealth": {
|
||||||
|
"corrupted": {
|
||||||
|
"title": "Cache-Korruption erkannt"
|
||||||
|
},
|
||||||
|
"degraded": {
|
||||||
|
"title": "Cache-Probleme erkannt"
|
||||||
|
},
|
||||||
|
"content": "{invalid} von {total} Cache-Einträgen sind ungültig ({rate}). Dies kann zu fehlenden Modellen oder Fehlern führen. Ein Neuaufbau des Caches wird empfohlen.",
|
||||||
|
"rebuildCache": "Cache neu aufbauen",
|
||||||
|
"dismiss": "Verwerfen",
|
||||||
|
"rebuilding": "Cache wird neu aufgebaut...",
|
||||||
|
"rebuildFailed": "Fehler beim Neuaufbau des Caches: {error}",
|
||||||
|
"retry": "Wiederholen"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -223,7 +223,11 @@
|
|||||||
"noCreditRequired": "No Credit Required",
|
"noCreditRequired": "No Credit Required",
|
||||||
"allowSellingGeneratedContent": "Allow Selling",
|
"allowSellingGeneratedContent": "Allow Selling",
|
||||||
"noTags": "No tags",
|
"noTags": "No tags",
|
||||||
"clearAll": "Clear All Filters"
|
"clearAll": "Clear All Filters",
|
||||||
|
"any": "Any",
|
||||||
|
"all": "All",
|
||||||
|
"tagLogicAny": "Match any tag (OR)",
|
||||||
|
"tagLogicAll": "Match all tags (AND)"
|
||||||
},
|
},
|
||||||
"theme": {
|
"theme": {
|
||||||
"toggle": "Toggle theme",
|
"toggle": "Toggle theme",
|
||||||
@@ -907,7 +911,12 @@
|
|||||||
"viewOnCivitai": "View on Civitai",
|
"viewOnCivitai": "View on Civitai",
|
||||||
"viewOnCivitaiText": "View on Civitai",
|
"viewOnCivitaiText": "View on Civitai",
|
||||||
"viewCreatorProfile": "View Creator Profile",
|
"viewCreatorProfile": "View Creator Profile",
|
||||||
"openFileLocation": "Open File Location"
|
"openFileLocation": "Open File Location",
|
||||||
|
"viewParams": "View parameters",
|
||||||
|
"setPreview": "Set as preview",
|
||||||
|
"previewSet": "Preview updated successfully",
|
||||||
|
"previewFailed": "Failed to update preview",
|
||||||
|
"delete": "Delete"
|
||||||
},
|
},
|
||||||
"openFileLocation": {
|
"openFileLocation": {
|
||||||
"success": "File location opened successfully",
|
"success": "File location opened successfully",
|
||||||
@@ -926,13 +935,15 @@
|
|||||||
"additionalNotes": "Additional Notes",
|
"additionalNotes": "Additional Notes",
|
||||||
"notesHint": "Press Enter to save, Shift+Enter for new line",
|
"notesHint": "Press Enter to save, Shift+Enter for new line",
|
||||||
"addNotesPlaceholder": "Add your notes here...",
|
"addNotesPlaceholder": "Add your notes here...",
|
||||||
"aboutThisVersion": "About this version"
|
"aboutThisVersion": "About this version",
|
||||||
|
"triggerWords": "Trigger Words"
|
||||||
},
|
},
|
||||||
"notes": {
|
"notes": {
|
||||||
"saved": "Notes saved successfully",
|
"saved": "Notes saved successfully",
|
||||||
"saveFailed": "Failed to save notes"
|
"saveFailed": "Failed to save notes"
|
||||||
},
|
},
|
||||||
"usageTips": {
|
"usageTips": {
|
||||||
|
"add": "Add",
|
||||||
"addPresetParameter": "Add preset parameter...",
|
"addPresetParameter": "Add preset parameter...",
|
||||||
"strengthMin": "Strength Min",
|
"strengthMin": "Strength Min",
|
||||||
"strengthMax": "Strength Max",
|
"strengthMax": "Strength Max",
|
||||||
@@ -941,17 +952,24 @@
|
|||||||
"clipStrength": "Clip Strength",
|
"clipStrength": "Clip Strength",
|
||||||
"clipSkip": "Clip Skip",
|
"clipSkip": "Clip Skip",
|
||||||
"valuePlaceholder": "Value",
|
"valuePlaceholder": "Value",
|
||||||
"add": "Add",
|
|
||||||
"invalidRange": "Invalid range format. Use x.x-y.y"
|
"invalidRange": "Invalid range format. Use x.x-y.y"
|
||||||
},
|
},
|
||||||
|
"params": {
|
||||||
|
"title": "Generation Parameters",
|
||||||
|
"prompt": "Prompt",
|
||||||
|
"negativePrompt": "Negative Prompt",
|
||||||
|
"noData": "No generation data available",
|
||||||
|
"promptCopied": "Prompt copied to clipboard"
|
||||||
|
},
|
||||||
"triggerWords": {
|
"triggerWords": {
|
||||||
"label": "Trigger Words",
|
"label": "Trigger Words",
|
||||||
"noTriggerWordsNeeded": "No trigger word needed",
|
"noTriggerWordsNeeded": "No trigger words needed",
|
||||||
"edit": "Edit trigger words",
|
"edit": "Edit trigger words",
|
||||||
"cancel": "Cancel editing",
|
"cancel": "Cancel editing",
|
||||||
"save": "Save changes",
|
"save": "Save changes",
|
||||||
"addPlaceholder": "Type to add or click suggestions below",
|
"addPlaceholder": "Type to add trigger word...",
|
||||||
"copyWord": "Copy trigger word",
|
"copyWord": "Copy trigger word",
|
||||||
|
"copyAll": "Copy all trigger words",
|
||||||
"deleteWord": "Delete trigger word",
|
"deleteWord": "Delete trigger word",
|
||||||
"suggestions": {
|
"suggestions": {
|
||||||
"noSuggestions": "No suggestions available",
|
"noSuggestions": "No suggestions available",
|
||||||
@@ -961,6 +979,9 @@
|
|||||||
"wordSuggestions": "Word Suggestions",
|
"wordSuggestions": "Word Suggestions",
|
||||||
"wordsFound": "{count} words found",
|
"wordsFound": "{count} words found",
|
||||||
"loading": "Loading suggestions..."
|
"loading": "Loading suggestions..."
|
||||||
|
},
|
||||||
|
"validation": {
|
||||||
|
"duplicate": "This trigger word already exists"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"description": {
|
"description": {
|
||||||
@@ -986,7 +1007,11 @@
|
|||||||
"previousWithShortcut": "Previous model (←)",
|
"previousWithShortcut": "Previous model (←)",
|
||||||
"nextWithShortcut": "Next model (→)",
|
"nextWithShortcut": "Next model (→)",
|
||||||
"noPrevious": "No previous model available",
|
"noPrevious": "No previous model available",
|
||||||
"noNext": "No next model available"
|
"noNext": "No next model available",
|
||||||
|
"previous": "Previous",
|
||||||
|
"next": "Next",
|
||||||
|
"switchModel": "Switch model",
|
||||||
|
"browseExamples": "Browse examples"
|
||||||
},
|
},
|
||||||
"license": {
|
"license": {
|
||||||
"noImageSell": "No selling generated content",
|
"noImageSell": "No selling generated content",
|
||||||
@@ -998,6 +1023,23 @@
|
|||||||
"noReLicense": "Same permissions required",
|
"noReLicense": "Same permissions required",
|
||||||
"restrictionsLabel": "License restrictions"
|
"restrictionsLabel": "License restrictions"
|
||||||
},
|
},
|
||||||
|
"examples": {
|
||||||
|
"add": "Add",
|
||||||
|
"addFirst": "Add your first example",
|
||||||
|
"dropFiles": "Drop files here or click to browse",
|
||||||
|
"supportedFormats": "Supports: JPG, PNG, WEBP, MP4, WEBM",
|
||||||
|
"uploading": "Uploading...",
|
||||||
|
"uploadSuccess": "Example uploaded successfully",
|
||||||
|
"uploadFailed": "Failed to upload example",
|
||||||
|
"confirmDelete": "Delete this example image?",
|
||||||
|
"deleted": "Example deleted successfully",
|
||||||
|
"deleteFailed": "Failed to delete example",
|
||||||
|
"title": "Example",
|
||||||
|
"empty": "No example images available"
|
||||||
|
},
|
||||||
|
"accordion": {
|
||||||
|
"modelDescription": "Model Description"
|
||||||
|
},
|
||||||
"loading": {
|
"loading": {
|
||||||
"exampleImages": "Loading example images...",
|
"exampleImages": "Loading example images...",
|
||||||
"description": "Loading model description...",
|
"description": "Loading model description...",
|
||||||
@@ -1572,6 +1614,20 @@
|
|||||||
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
||||||
"supportCta": "Support on Ko-fi",
|
"supportCta": "Support on Ko-fi",
|
||||||
"learnMore": "LM Civitai Extension Tutorial"
|
"learnMore": "LM Civitai Extension Tutorial"
|
||||||
|
},
|
||||||
|
"cacheHealth": {
|
||||||
|
"corrupted": {
|
||||||
|
"title": "Cache Corruption Detected"
|
||||||
|
},
|
||||||
|
"degraded": {
|
||||||
|
"title": "Cache Issues Detected"
|
||||||
|
},
|
||||||
|
"content": "{invalid} of {total} cache entries are invalid ({rate}). This may cause missing models or errors. Rebuilding the cache is recommended.",
|
||||||
|
"rebuildCache": "Rebuild Cache",
|
||||||
|
"dismiss": "Dismiss",
|
||||||
|
"rebuilding": "Rebuilding cache...",
|
||||||
|
"rebuildFailed": "Failed to rebuild cache: {error}",
|
||||||
|
"retry": "Retry"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -223,7 +223,11 @@
|
|||||||
"noCreditRequired": "Sin crédito requerido",
|
"noCreditRequired": "Sin crédito requerido",
|
||||||
"allowSellingGeneratedContent": "Venta permitida",
|
"allowSellingGeneratedContent": "Venta permitida",
|
||||||
"noTags": "Sin etiquetas",
|
"noTags": "Sin etiquetas",
|
||||||
"clearAll": "Limpiar todos los filtros"
|
"clearAll": "Limpiar todos los filtros",
|
||||||
|
"any": "Cualquiera",
|
||||||
|
"all": "Todos",
|
||||||
|
"tagLogicAny": "Coincidir con cualquier etiqueta (O)",
|
||||||
|
"tagLogicAll": "Coincidir con todas las etiquetas (Y)"
|
||||||
},
|
},
|
||||||
"theme": {
|
"theme": {
|
||||||
"toggle": "Cambiar tema",
|
"toggle": "Cambiar tema",
|
||||||
@@ -1572,6 +1576,20 @@
|
|||||||
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
||||||
"supportCta": "Support on Ko-fi",
|
"supportCta": "Support on Ko-fi",
|
||||||
"learnMore": "LM Civitai Extension Tutorial"
|
"learnMore": "LM Civitai Extension Tutorial"
|
||||||
|
},
|
||||||
|
"cacheHealth": {
|
||||||
|
"corrupted": {
|
||||||
|
"title": "Corrupción de caché detectada"
|
||||||
|
},
|
||||||
|
"degraded": {
|
||||||
|
"title": "Problemas de caché detectados"
|
||||||
|
},
|
||||||
|
"content": "{invalid} de {total} entradas de caché son inválidas ({rate}). Esto puede causar modelos faltantes o errores. Se recomienda reconstruir la caché.",
|
||||||
|
"rebuildCache": "Reconstruir caché",
|
||||||
|
"dismiss": "Descartar",
|
||||||
|
"rebuilding": "Reconstruyendo caché...",
|
||||||
|
"rebuildFailed": "Error al reconstruir la caché: {error}",
|
||||||
|
"retry": "Reintentar"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -223,7 +223,11 @@
|
|||||||
"noCreditRequired": "Crédit non requis",
|
"noCreditRequired": "Crédit non requis",
|
||||||
"allowSellingGeneratedContent": "Vente autorisée",
|
"allowSellingGeneratedContent": "Vente autorisée",
|
||||||
"noTags": "Aucun tag",
|
"noTags": "Aucun tag",
|
||||||
"clearAll": "Effacer tous les filtres"
|
"clearAll": "Effacer tous les filtres",
|
||||||
|
"any": "N'importe quel",
|
||||||
|
"all": "Tous",
|
||||||
|
"tagLogicAny": "Correspondre à n'importe quel tag (OU)",
|
||||||
|
"tagLogicAll": "Correspondre à tous les tags (ET)"
|
||||||
},
|
},
|
||||||
"theme": {
|
"theme": {
|
||||||
"toggle": "Basculer le thème",
|
"toggle": "Basculer le thème",
|
||||||
@@ -1572,6 +1576,20 @@
|
|||||||
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
||||||
"supportCta": "Support on Ko-fi",
|
"supportCta": "Support on Ko-fi",
|
||||||
"learnMore": "LM Civitai Extension Tutorial"
|
"learnMore": "LM Civitai Extension Tutorial"
|
||||||
|
},
|
||||||
|
"cacheHealth": {
|
||||||
|
"corrupted": {
|
||||||
|
"title": "Corruption du cache détectée"
|
||||||
|
},
|
||||||
|
"degraded": {
|
||||||
|
"title": "Problèmes de cache détectés"
|
||||||
|
},
|
||||||
|
"content": "{invalid} des {total} entrées de cache sont invalides ({rate}). Cela peut provoquer des modèles manquants ou des erreurs. Il est recommandé de reconstruire le cache.",
|
||||||
|
"rebuildCache": "Reconstruire le cache",
|
||||||
|
"dismiss": "Ignorer",
|
||||||
|
"rebuilding": "Reconstruction du cache...",
|
||||||
|
"rebuildFailed": "Échec de la reconstruction du cache : {error}",
|
||||||
|
"retry": "Réessayer"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -9,9 +9,9 @@
|
|||||||
"back": "חזור",
|
"back": "חזור",
|
||||||
"next": "הבא",
|
"next": "הבא",
|
||||||
"backToTop": "חזור למעלה",
|
"backToTop": "חזור למעלה",
|
||||||
"add": "הוסף",
|
|
||||||
"settings": "הגדרות",
|
"settings": "הגדרות",
|
||||||
"help": "עזרה"
|
"help": "עזרה",
|
||||||
|
"add": "הוסף"
|
||||||
},
|
},
|
||||||
"status": {
|
"status": {
|
||||||
"loading": "טוען...",
|
"loading": "טוען...",
|
||||||
@@ -223,7 +223,11 @@
|
|||||||
"noCreditRequired": "ללא קרדיט נדרש",
|
"noCreditRequired": "ללא קרדיט נדרש",
|
||||||
"allowSellingGeneratedContent": "אפשר מכירה",
|
"allowSellingGeneratedContent": "אפשר מכירה",
|
||||||
"noTags": "ללא תגיות",
|
"noTags": "ללא תגיות",
|
||||||
"clearAll": "נקה את כל המסננים"
|
"clearAll": "נקה את כל המסננים",
|
||||||
|
"any": "כלשהו",
|
||||||
|
"all": "כל התגים",
|
||||||
|
"tagLogicAny": "התאם כל תג (או)",
|
||||||
|
"tagLogicAll": "התאם את כל התגים (וגם)"
|
||||||
},
|
},
|
||||||
"theme": {
|
"theme": {
|
||||||
"toggle": "החלף ערכת נושא",
|
"toggle": "החלף ערכת נושא",
|
||||||
@@ -1572,6 +1576,20 @@
|
|||||||
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
||||||
"supportCta": "Support on Ko-fi",
|
"supportCta": "Support on Ko-fi",
|
||||||
"learnMore": "LM Civitai Extension Tutorial"
|
"learnMore": "LM Civitai Extension Tutorial"
|
||||||
|
},
|
||||||
|
"cacheHealth": {
|
||||||
|
"corrupted": {
|
||||||
|
"title": "זוהתה שחיתות במטמון"
|
||||||
|
},
|
||||||
|
"degraded": {
|
||||||
|
"title": "זוהו בעיות במטמון"
|
||||||
|
},
|
||||||
|
"content": "{invalid} מתוך {total} רשומות מטמון אינן תקינות ({rate}). זה עלול לגרום לדגמים חסרים או לשגיאות. מומלץ לבנות מחדש את המטמון.",
|
||||||
|
"rebuildCache": "בניית מטמון מחדש",
|
||||||
|
"dismiss": "ביטול",
|
||||||
|
"rebuilding": "בונה מחדש את המטמון...",
|
||||||
|
"rebuildFailed": "נכשלה בניית המטמון מחדש: {error}",
|
||||||
|
"retry": "נסה שוב"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -223,7 +223,11 @@
|
|||||||
"noCreditRequired": "クレジット不要",
|
"noCreditRequired": "クレジット不要",
|
||||||
"allowSellingGeneratedContent": "販売許可",
|
"allowSellingGeneratedContent": "販売許可",
|
||||||
"noTags": "タグなし",
|
"noTags": "タグなし",
|
||||||
"clearAll": "すべてのフィルタをクリア"
|
"clearAll": "すべてのフィルタをクリア",
|
||||||
|
"any": "いずれか",
|
||||||
|
"all": "すべて",
|
||||||
|
"tagLogicAny": "いずれかのタグに一致 (OR)",
|
||||||
|
"tagLogicAll": "すべてのタグに一致 (AND)"
|
||||||
},
|
},
|
||||||
"theme": {
|
"theme": {
|
||||||
"toggle": "テーマの切り替え",
|
"toggle": "テーマの切り替え",
|
||||||
@@ -1572,6 +1576,20 @@
|
|||||||
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
||||||
"supportCta": "Support on Ko-fi",
|
"supportCta": "Support on Ko-fi",
|
||||||
"learnMore": "LM Civitai Extension Tutorial"
|
"learnMore": "LM Civitai Extension Tutorial"
|
||||||
|
},
|
||||||
|
"cacheHealth": {
|
||||||
|
"corrupted": {
|
||||||
|
"title": "キャッシュの破損が検出されました"
|
||||||
|
},
|
||||||
|
"degraded": {
|
||||||
|
"title": "キャッシュの問題が検出されました"
|
||||||
|
},
|
||||||
|
"content": "{total}個のキャッシュエントリのうち{invalid}個が無効です({rate})。モデルが見つからない原因になったり、エラーが発生する可能性があります。キャッシュの再構築を推奨します。",
|
||||||
|
"rebuildCache": "キャッシュを再構築",
|
||||||
|
"dismiss": "閉じる",
|
||||||
|
"rebuilding": "キャッシュを再構築中...",
|
||||||
|
"rebuildFailed": "キャッシュの再構築に失敗しました: {error}",
|
||||||
|
"retry": "再試行"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -223,7 +223,11 @@
|
|||||||
"noCreditRequired": "크레딧 표기 없음",
|
"noCreditRequired": "크레딧 표기 없음",
|
||||||
"allowSellingGeneratedContent": "판매 허용",
|
"allowSellingGeneratedContent": "판매 허용",
|
||||||
"noTags": "태그 없음",
|
"noTags": "태그 없음",
|
||||||
"clearAll": "모든 필터 지우기"
|
"clearAll": "모든 필터 지우기",
|
||||||
|
"any": "아무",
|
||||||
|
"all": "모두",
|
||||||
|
"tagLogicAny": "모든 태그 일치 (OR)",
|
||||||
|
"tagLogicAll": "모든 태그 일치 (AND)"
|
||||||
},
|
},
|
||||||
"theme": {
|
"theme": {
|
||||||
"toggle": "테마 토글",
|
"toggle": "테마 토글",
|
||||||
@@ -1572,6 +1576,20 @@
|
|||||||
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
||||||
"supportCta": "Support on Ko-fi",
|
"supportCta": "Support on Ko-fi",
|
||||||
"learnMore": "LM Civitai Extension Tutorial"
|
"learnMore": "LM Civitai Extension Tutorial"
|
||||||
|
},
|
||||||
|
"cacheHealth": {
|
||||||
|
"corrupted": {
|
||||||
|
"title": "캐시 손상이 감지되었습니다"
|
||||||
|
},
|
||||||
|
"degraded": {
|
||||||
|
"title": "캐시 문제가 감지되었습니다"
|
||||||
|
},
|
||||||
|
"content": "{total}개의 캐시 항목 중 {invalid}개가 유효하지 않습니다 ({rate}). 모델 누락이나 오류가 발생할 수 있습니다. 캐시를 재구축하는 것이 좋습니다.",
|
||||||
|
"rebuildCache": "캐시 재구축",
|
||||||
|
"dismiss": "무시",
|
||||||
|
"rebuilding": "캐시 재구축 중...",
|
||||||
|
"rebuildFailed": "캐시 재구축 실패: {error}",
|
||||||
|
"retry": "다시 시도"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -223,7 +223,11 @@
|
|||||||
"noCreditRequired": "Без указания авторства",
|
"noCreditRequired": "Без указания авторства",
|
||||||
"allowSellingGeneratedContent": "Продажа разрешена",
|
"allowSellingGeneratedContent": "Продажа разрешена",
|
||||||
"noTags": "Без тегов",
|
"noTags": "Без тегов",
|
||||||
"clearAll": "Очистить все фильтры"
|
"clearAll": "Очистить все фильтры",
|
||||||
|
"any": "Любой",
|
||||||
|
"all": "Все",
|
||||||
|
"tagLogicAny": "Совпадение с любым тегом (ИЛИ)",
|
||||||
|
"tagLogicAll": "Совпадение со всеми тегами (И)"
|
||||||
},
|
},
|
||||||
"theme": {
|
"theme": {
|
||||||
"toggle": "Переключить тему",
|
"toggle": "Переключить тему",
|
||||||
@@ -1572,6 +1576,20 @@
|
|||||||
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
||||||
"supportCta": "Support on Ko-fi",
|
"supportCta": "Support on Ko-fi",
|
||||||
"learnMore": "LM Civitai Extension Tutorial"
|
"learnMore": "LM Civitai Extension Tutorial"
|
||||||
|
},
|
||||||
|
"cacheHealth": {
|
||||||
|
"corrupted": {
|
||||||
|
"title": "Обнаружено повреждение кэша"
|
||||||
|
},
|
||||||
|
"degraded": {
|
||||||
|
"title": "Обнаружены проблемы с кэшем"
|
||||||
|
},
|
||||||
|
"content": "{invalid} из {total} записей кэша недействительны ({rate}). Это может привести к отсутствию моделей или ошибкам. Рекомендуется перестроить кэш.",
|
||||||
|
"rebuildCache": "Перестроить кэш",
|
||||||
|
"dismiss": "Отклонить",
|
||||||
|
"rebuilding": "Перестроение кэша...",
|
||||||
|
"rebuildFailed": "Не удалось перестроить кэш: {error}",
|
||||||
|
"retry": "Повторить"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -223,7 +223,11 @@
|
|||||||
"noCreditRequired": "无需署名",
|
"noCreditRequired": "无需署名",
|
||||||
"allowSellingGeneratedContent": "允许销售",
|
"allowSellingGeneratedContent": "允许销售",
|
||||||
"noTags": "无标签",
|
"noTags": "无标签",
|
||||||
"clearAll": "清除所有筛选"
|
"clearAll": "清除所有筛选",
|
||||||
|
"any": "任一",
|
||||||
|
"all": "全部",
|
||||||
|
"tagLogicAny": "匹配任一标签 (或)",
|
||||||
|
"tagLogicAll": "匹配所有标签 (与)"
|
||||||
},
|
},
|
||||||
"theme": {
|
"theme": {
|
||||||
"toggle": "切换主题",
|
"toggle": "切换主题",
|
||||||
@@ -1572,6 +1576,20 @@
|
|||||||
"content": "来爱发电为Lora Manager项目发电,支持项目持续开发的同时,获取浏览器插件验证码,按季支付更优惠!支付宝/微信方便支付。感谢支持!🚀",
|
"content": "来爱发电为Lora Manager项目发电,支持项目持续开发的同时,获取浏览器插件验证码,按季支付更优惠!支付宝/微信方便支付。感谢支持!🚀",
|
||||||
"supportCta": "为LM发电",
|
"supportCta": "为LM发电",
|
||||||
"learnMore": "浏览器插件教程"
|
"learnMore": "浏览器插件教程"
|
||||||
|
},
|
||||||
|
"cacheHealth": {
|
||||||
|
"corrupted": {
|
||||||
|
"title": "检测到缓存损坏"
|
||||||
|
},
|
||||||
|
"degraded": {
|
||||||
|
"title": "检测到缓存问题"
|
||||||
|
},
|
||||||
|
"content": "{total} 个缓存条目中有 {invalid} 个无效({rate})。这可能导致模型丢失或错误。建议重建缓存。",
|
||||||
|
"rebuildCache": "重建缓存",
|
||||||
|
"dismiss": "忽略",
|
||||||
|
"rebuilding": "正在重建缓存...",
|
||||||
|
"rebuildFailed": "重建缓存失败:{error}",
|
||||||
|
"retry": "重试"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -223,7 +223,11 @@
|
|||||||
"noCreditRequired": "無需署名",
|
"noCreditRequired": "無需署名",
|
||||||
"allowSellingGeneratedContent": "允許銷售",
|
"allowSellingGeneratedContent": "允許銷售",
|
||||||
"noTags": "無標籤",
|
"noTags": "無標籤",
|
||||||
"clearAll": "清除所有篩選"
|
"clearAll": "清除所有篩選",
|
||||||
|
"any": "任一",
|
||||||
|
"all": "全部",
|
||||||
|
"tagLogicAny": "符合任一票籤 (或)",
|
||||||
|
"tagLogicAll": "符合所有標籤 (與)"
|
||||||
},
|
},
|
||||||
"theme": {
|
"theme": {
|
||||||
"toggle": "切換主題",
|
"toggle": "切換主題",
|
||||||
@@ -1572,6 +1576,20 @@
|
|||||||
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
||||||
"supportCta": "Support on Ko-fi",
|
"supportCta": "Support on Ko-fi",
|
||||||
"learnMore": "LM Civitai Extension Tutorial"
|
"learnMore": "LM Civitai Extension Tutorial"
|
||||||
|
},
|
||||||
|
"cacheHealth": {
|
||||||
|
"corrupted": {
|
||||||
|
"title": "檢測到快取損壞"
|
||||||
|
},
|
||||||
|
"degraded": {
|
||||||
|
"title": "檢測到快取問題"
|
||||||
|
},
|
||||||
|
"content": "{total} 個快取項目中有 {invalid} 個無效({rate})。這可能會導致模型遺失或錯誤。建議重建快取。",
|
||||||
|
"rebuildCache": "重建快取",
|
||||||
|
"dismiss": "關閉",
|
||||||
|
"rebuilding": "重建快取中...",
|
||||||
|
"rebuildFailed": "重建快取失敗:{error}",
|
||||||
|
"retry": "重試"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,7 +4,9 @@
|
|||||||
"private": true,
|
"private": true,
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"test": "vitest run",
|
"test": "npm run test:js && npm run test:vue",
|
||||||
|
"test:js": "vitest run",
|
||||||
|
"test:vue": "cd vue-widgets && npx vitest run",
|
||||||
"test:watch": "vitest",
|
"test:watch": "vitest",
|
||||||
"test:coverage": "node scripts/run_frontend_coverage.js"
|
"test:coverage": "node scripts/run_frontend_coverage.js"
|
||||||
},
|
},
|
||||||
|
|||||||
205
py/config.py
205
py/config.py
@@ -441,82 +441,53 @@ class Config:
|
|||||||
logger.info("Failed to write symlink cache %s: %s", cache_path, exc)
|
logger.info("Failed to write symlink cache %s: %s", cache_path, exc)
|
||||||
|
|
||||||
def _scan_symbolic_links(self):
|
def _scan_symbolic_links(self):
|
||||||
"""Scan all symbolic links in LoRA, Checkpoint, and Embedding root directories"""
|
"""Scan symbolic links in LoRA, Checkpoint, and Embedding root directories.
|
||||||
|
|
||||||
|
Only scans the first level of each root directory to avoid performance
|
||||||
|
issues with large file systems. Detects symlinks and Windows junctions
|
||||||
|
at the root level only (not nested symlinks in subdirectories).
|
||||||
|
"""
|
||||||
start = time.perf_counter()
|
start = time.perf_counter()
|
||||||
|
|
||||||
# Reset mappings before rescanning to avoid stale entries
|
# Reset mappings before rescanning to avoid stale entries
|
||||||
self._path_mappings.clear()
|
self._path_mappings.clear()
|
||||||
self._seed_root_symlink_mappings()
|
self._seed_root_symlink_mappings()
|
||||||
visited_dirs: Set[str] = set()
|
|
||||||
for root in self._symlink_roots():
|
for root in self._symlink_roots():
|
||||||
self._scan_directory_links(root, visited_dirs)
|
self._scan_first_level_symlinks(root)
|
||||||
logger.debug(
|
logger.debug(
|
||||||
"Symlink scan finished in %.2f ms with %d mappings",
|
"Symlink scan finished in %.2f ms with %d mappings",
|
||||||
(time.perf_counter() - start) * 1000,
|
(time.perf_counter() - start) * 1000,
|
||||||
len(self._path_mappings),
|
len(self._path_mappings),
|
||||||
)
|
)
|
||||||
|
|
||||||
def _scan_directory_links(self, root: str, visited_dirs: Set[str]):
|
def _scan_first_level_symlinks(self, root: str):
|
||||||
"""Iteratively scan directory symlinks to avoid deep recursion."""
|
"""Scan only the first level of a directory for symlinks.
|
||||||
|
|
||||||
|
This avoids traversing the entire directory tree which can be extremely
|
||||||
|
slow for large model collections. Only symlinks directly under the root
|
||||||
|
are detected.
|
||||||
|
"""
|
||||||
try:
|
try:
|
||||||
# Note: We only use realpath for the initial root if it's not already resolved
|
with os.scandir(root) as it:
|
||||||
# to ensure we have a valid entry point.
|
for entry in it:
|
||||||
root_real = self._normalize_path(os.path.realpath(root))
|
try:
|
||||||
except OSError:
|
# Only detect symlinks including Windows junctions
|
||||||
root_real = self._normalize_path(root)
|
# Skip normal directories to avoid deep traversal
|
||||||
|
if not self._entry_is_symlink(entry):
|
||||||
|
continue
|
||||||
|
|
||||||
if root_real in visited_dirs:
|
# Resolve the symlink target
|
||||||
return
|
target_path = os.path.realpath(entry.path)
|
||||||
|
if not os.path.isdir(target_path):
|
||||||
|
continue
|
||||||
|
|
||||||
visited_dirs.add(root_real)
|
self.add_path_mapping(entry.path, target_path)
|
||||||
# Stack entries: (display_path, real_resolved_path)
|
except Exception as inner_exc:
|
||||||
stack: List[Tuple[str, str]] = [(root, root_real)]
|
logger.debug(
|
||||||
|
"Error processing directory entry %s: %s", entry.path, inner_exc
|
||||||
while stack:
|
)
|
||||||
current_display, current_real = stack.pop()
|
except Exception as e:
|
||||||
try:
|
logger.error(f"Error scanning links in {root}: {e}")
|
||||||
with os.scandir(current_display) as it:
|
|
||||||
for entry in it:
|
|
||||||
try:
|
|
||||||
# 1. Detect symlinks including Windows junctions
|
|
||||||
is_link = self._entry_is_symlink(entry)
|
|
||||||
|
|
||||||
if is_link:
|
|
||||||
# Only resolve realpath when we actually find a link
|
|
||||||
target_path = os.path.realpath(entry.path)
|
|
||||||
if not os.path.isdir(target_path):
|
|
||||||
continue
|
|
||||||
|
|
||||||
normalized_target = self._normalize_path(target_path)
|
|
||||||
self.add_path_mapping(entry.path, target_path)
|
|
||||||
|
|
||||||
if normalized_target in visited_dirs:
|
|
||||||
continue
|
|
||||||
|
|
||||||
visited_dirs.add(normalized_target)
|
|
||||||
stack.append((target_path, normalized_target))
|
|
||||||
continue
|
|
||||||
|
|
||||||
# 2. Process normal directories
|
|
||||||
if not entry.is_dir(follow_symlinks=False):
|
|
||||||
continue
|
|
||||||
|
|
||||||
# For normal directories, we avoid realpath() call by
|
|
||||||
# incrementally building the real path relative to current_real.
|
|
||||||
# This is safe because 'entry' is NOT a symlink.
|
|
||||||
entry_real = self._normalize_path(os.path.join(current_real, entry.name))
|
|
||||||
|
|
||||||
if entry_real in visited_dirs:
|
|
||||||
continue
|
|
||||||
|
|
||||||
visited_dirs.add(entry_real)
|
|
||||||
stack.append((entry.path, entry_real))
|
|
||||||
except Exception as inner_exc:
|
|
||||||
logger.debug(
|
|
||||||
"Error processing directory entry %s: %s", entry.path, inner_exc
|
|
||||||
)
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Error scanning links in {current_display}: {e}")
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@@ -674,6 +645,23 @@ class Config:
|
|||||||
checkpoint_map = self._dedupe_existing_paths(checkpoint_paths)
|
checkpoint_map = self._dedupe_existing_paths(checkpoint_paths)
|
||||||
unet_map = self._dedupe_existing_paths(unet_paths)
|
unet_map = self._dedupe_existing_paths(unet_paths)
|
||||||
|
|
||||||
|
# Detect when checkpoints and unet share the same physical location
|
||||||
|
# This is a configuration issue that can cause duplicate model entries
|
||||||
|
overlapping_real_paths = set(checkpoint_map.keys()) & set(unet_map.keys())
|
||||||
|
if overlapping_real_paths:
|
||||||
|
logger.warning(
|
||||||
|
"Detected overlapping paths between 'checkpoints' and 'diffusion_models' (unet). "
|
||||||
|
"They should not point to the same physical folder as they are different model types. "
|
||||||
|
"Please fix your ComfyUI path configuration to separate these folders. "
|
||||||
|
"Falling back to 'checkpoints' for backward compatibility. "
|
||||||
|
"Overlapping real paths: %s",
|
||||||
|
[checkpoint_map.get(rp, rp) for rp in overlapping_real_paths]
|
||||||
|
)
|
||||||
|
# Remove overlapping paths from unet_map to prioritize checkpoints
|
||||||
|
for rp in overlapping_real_paths:
|
||||||
|
if rp in unet_map:
|
||||||
|
del unet_map[rp]
|
||||||
|
|
||||||
merged_map: Dict[str, str] = {}
|
merged_map: Dict[str, str] = {}
|
||||||
for real_path, original in {**checkpoint_map, **unet_map}.items():
|
for real_path, original in {**checkpoint_map, **unet_map}.items():
|
||||||
if real_path not in merged_map:
|
if real_path not in merged_map:
|
||||||
@@ -778,7 +766,23 @@ class Config:
|
|||||||
return f'/api/lm/previews?path={encoded_path}'
|
return f'/api/lm/previews?path={encoded_path}'
|
||||||
|
|
||||||
def is_preview_path_allowed(self, preview_path: str) -> bool:
|
def is_preview_path_allowed(self, preview_path: str) -> bool:
|
||||||
"""Return ``True`` if ``preview_path`` is within an allowed directory."""
|
"""Return ``True`` if ``preview_path`` is within an allowed directory.
|
||||||
|
|
||||||
|
If the path is initially rejected, attempts to discover deep symlinks
|
||||||
|
that were not scanned during initialization. If a symlink is found,
|
||||||
|
updates the in-memory path mappings and retries the check.
|
||||||
|
"""
|
||||||
|
|
||||||
|
if self._is_path_in_allowed_roots(preview_path):
|
||||||
|
return True
|
||||||
|
|
||||||
|
if self._try_discover_deep_symlink(preview_path):
|
||||||
|
return self._is_path_in_allowed_roots(preview_path)
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
def _is_path_in_allowed_roots(self, preview_path: str) -> bool:
|
||||||
|
"""Check if preview_path is within allowed preview roots without modification."""
|
||||||
|
|
||||||
if not preview_path:
|
if not preview_path:
|
||||||
return False
|
return False
|
||||||
@@ -788,29 +792,72 @@ class Config:
|
|||||||
except Exception:
|
except Exception:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# Use os.path.normcase for case-insensitive comparison on Windows.
|
|
||||||
# On Windows, Path.relative_to() is case-sensitive for drive letters,
|
|
||||||
# causing paths like 'a:/folder' to not match 'A:/folder'.
|
|
||||||
candidate_str = os.path.normcase(str(candidate))
|
candidate_str = os.path.normcase(str(candidate))
|
||||||
for root in self._preview_root_paths:
|
for root in self._preview_root_paths:
|
||||||
root_str = os.path.normcase(str(root))
|
root_str = os.path.normcase(str(root))
|
||||||
# Check if candidate is equal to or under the root directory
|
|
||||||
if candidate_str == root_str or candidate_str.startswith(root_str + os.sep):
|
if candidate_str == root_str or candidate_str.startswith(root_str + os.sep):
|
||||||
return True
|
return True
|
||||||
|
|
||||||
if self._preview_root_paths:
|
logger.debug(
|
||||||
logger.debug(
|
"Path not in allowed roots: %s (candidate=%s, num_roots=%d)",
|
||||||
"Preview path rejected: %s (candidate=%s, num_roots=%d, first_root=%s)",
|
preview_path,
|
||||||
preview_path,
|
candidate_str,
|
||||||
candidate_str,
|
len(self._preview_root_paths),
|
||||||
len(self._preview_root_paths),
|
)
|
||||||
os.path.normcase(str(next(iter(self._preview_root_paths)))),
|
|
||||||
)
|
return False
|
||||||
else:
|
|
||||||
logger.debug(
|
def _try_discover_deep_symlink(self, preview_path: str) -> bool:
|
||||||
"Preview path rejected (no roots configured): %s",
|
"""Attempt to discover a deep symlink that contains the preview_path.
|
||||||
preview_path,
|
|
||||||
)
|
Walks up from the preview path to the root directories, checking each
|
||||||
|
parent directory for symlinks. If a symlink is found, updates the
|
||||||
|
in-memory path mappings and preview roots.
|
||||||
|
|
||||||
|
Only updates in-memory state (self._path_mappings and self._preview_root_paths),
|
||||||
|
does not modify the persistent cache file.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if a symlink was discovered and mappings updated, False otherwise.
|
||||||
|
"""
|
||||||
|
if not preview_path:
|
||||||
|
return False
|
||||||
|
|
||||||
|
try:
|
||||||
|
candidate = Path(preview_path).expanduser()
|
||||||
|
except Exception:
|
||||||
|
return False
|
||||||
|
|
||||||
|
current = candidate
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
if self._is_link(str(current)):
|
||||||
|
try:
|
||||||
|
target = os.path.realpath(str(current))
|
||||||
|
normalized_target = self._normalize_path(target)
|
||||||
|
normalized_link = self._normalize_path(str(current))
|
||||||
|
|
||||||
|
self._path_mappings[normalized_target] = normalized_link
|
||||||
|
self._preview_root_paths.update(self._expand_preview_root(normalized_target))
|
||||||
|
self._preview_root_paths.update(self._expand_preview_root(normalized_link))
|
||||||
|
|
||||||
|
logger.debug(
|
||||||
|
"Discovered deep symlink: %s -> %s (preview path: %s)",
|
||||||
|
normalized_link,
|
||||||
|
normalized_target,
|
||||||
|
preview_path
|
||||||
|
)
|
||||||
|
|
||||||
|
return True
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
parent = current.parent
|
||||||
|
if parent == current:
|
||||||
|
break
|
||||||
|
current = parent
|
||||||
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,7 @@
|
|||||||
import os
|
import os
|
||||||
|
import logging
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
# Check if running in standalone mode
|
# Check if running in standalone mode
|
||||||
standalone_mode = os.environ.get("LORA_MANAGER_STANDALONE", "0") == "1" or os.environ.get("HF_HUB_DISABLE_TELEMETRY", "0") == "0"
|
standalone_mode = os.environ.get("LORA_MANAGER_STANDALONE", "0") == "1" or os.environ.get("HF_HUB_DISABLE_TELEMETRY", "0") == "0"
|
||||||
@@ -14,7 +17,7 @@ if not standalone_mode:
|
|||||||
# Initialize registry
|
# Initialize registry
|
||||||
registry = MetadataRegistry()
|
registry = MetadataRegistry()
|
||||||
|
|
||||||
print("ComfyUI Metadata Collector initialized")
|
logger.info("ComfyUI Metadata Collector initialized")
|
||||||
|
|
||||||
def get_metadata(prompt_id=None):
|
def get_metadata(prompt_id=None):
|
||||||
"""Helper function to get metadata from the registry"""
|
"""Helper function to get metadata from the registry"""
|
||||||
@@ -23,7 +26,7 @@ if not standalone_mode:
|
|||||||
else:
|
else:
|
||||||
# Standalone mode - provide dummy implementations
|
# Standalone mode - provide dummy implementations
|
||||||
def init():
|
def init():
|
||||||
print("ComfyUI Metadata Collector disabled in standalone mode")
|
logger.info("ComfyUI Metadata Collector disabled in standalone mode")
|
||||||
|
|
||||||
def get_metadata(prompt_id=None):
|
def get_metadata(prompt_id=None):
|
||||||
"""Dummy implementation for standalone mode"""
|
"""Dummy implementation for standalone mode"""
|
||||||
|
|||||||
@@ -1,7 +1,10 @@
|
|||||||
import sys
|
import sys
|
||||||
import inspect
|
import inspect
|
||||||
|
import logging
|
||||||
from .metadata_registry import MetadataRegistry
|
from .metadata_registry import MetadataRegistry
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
class MetadataHook:
|
class MetadataHook:
|
||||||
"""Install hooks for metadata collection"""
|
"""Install hooks for metadata collection"""
|
||||||
|
|
||||||
@@ -23,7 +26,7 @@ class MetadataHook:
|
|||||||
|
|
||||||
# If we can't find the execution module, we can't install hooks
|
# If we can't find the execution module, we can't install hooks
|
||||||
if execution is None:
|
if execution is None:
|
||||||
print("Could not locate ComfyUI execution module, metadata collection disabled")
|
logger.warning("Could not locate ComfyUI execution module, metadata collection disabled")
|
||||||
return
|
return
|
||||||
|
|
||||||
# Detect whether we're using the new async version of ComfyUI
|
# Detect whether we're using the new async version of ComfyUI
|
||||||
@@ -37,16 +40,16 @@ class MetadataHook:
|
|||||||
is_async = inspect.iscoroutinefunction(execution._map_node_over_list)
|
is_async = inspect.iscoroutinefunction(execution._map_node_over_list)
|
||||||
|
|
||||||
if is_async:
|
if is_async:
|
||||||
print("Detected async ComfyUI execution, installing async metadata hooks")
|
logger.info("Detected async ComfyUI execution, installing async metadata hooks")
|
||||||
MetadataHook._install_async_hooks(execution, map_node_func_name)
|
MetadataHook._install_async_hooks(execution, map_node_func_name)
|
||||||
else:
|
else:
|
||||||
print("Detected sync ComfyUI execution, installing sync metadata hooks")
|
logger.info("Detected sync ComfyUI execution, installing sync metadata hooks")
|
||||||
MetadataHook._install_sync_hooks(execution)
|
MetadataHook._install_sync_hooks(execution)
|
||||||
|
|
||||||
print("Metadata collection hooks installed for runtime values")
|
logger.info("Metadata collection hooks installed for runtime values")
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(f"Error installing metadata hooks: {str(e)}")
|
logger.error(f"Error installing metadata hooks: {str(e)}")
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _install_sync_hooks(execution):
|
def _install_sync_hooks(execution):
|
||||||
@@ -82,7 +85,7 @@ class MetadataHook:
|
|||||||
if node_id is not None:
|
if node_id is not None:
|
||||||
registry.record_node_execution(node_id, class_type, input_data_all, None)
|
registry.record_node_execution(node_id, class_type, input_data_all, None)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(f"Error collecting metadata (pre-execution): {str(e)}")
|
logger.error(f"Error collecting metadata (pre-execution): {str(e)}")
|
||||||
|
|
||||||
# Execute the original function
|
# Execute the original function
|
||||||
results = original_map_node_over_list(obj, input_data_all, func, allow_interrupt, execution_block_cb, pre_execute_cb)
|
results = original_map_node_over_list(obj, input_data_all, func, allow_interrupt, execution_block_cb, pre_execute_cb)
|
||||||
@@ -113,7 +116,7 @@ class MetadataHook:
|
|||||||
if node_id is not None:
|
if node_id is not None:
|
||||||
registry.update_node_execution(node_id, class_type, results)
|
registry.update_node_execution(node_id, class_type, results)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(f"Error collecting metadata (post-execution): {str(e)}")
|
logger.error(f"Error collecting metadata (post-execution): {str(e)}")
|
||||||
|
|
||||||
return results
|
return results
|
||||||
|
|
||||||
@@ -159,7 +162,7 @@ class MetadataHook:
|
|||||||
if node_id is not None:
|
if node_id is not None:
|
||||||
registry.record_node_execution(node_id, class_type, input_data_all, None)
|
registry.record_node_execution(node_id, class_type, input_data_all, None)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(f"Error collecting metadata (pre-execution): {str(e)}")
|
logger.error(f"Error collecting metadata (pre-execution): {str(e)}")
|
||||||
|
|
||||||
# Call original function with all args/kwargs
|
# Call original function with all args/kwargs
|
||||||
results = await original_map_node_over_list(
|
results = await original_map_node_over_list(
|
||||||
@@ -176,7 +179,7 @@ class MetadataHook:
|
|||||||
if node_id is not None:
|
if node_id is not None:
|
||||||
registry.update_node_execution(node_id, class_type, results)
|
registry.update_node_execution(node_id, class_type, results)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(f"Error collecting metadata (post-execution): {str(e)}")
|
logger.error(f"Error collecting metadata (post-execution): {str(e)}")
|
||||||
|
|
||||||
return results
|
return results
|
||||||
|
|
||||||
|
|||||||
@@ -126,9 +126,7 @@ class LoraCyclerLM:
|
|||||||
"current_index": [clamped_index],
|
"current_index": [clamped_index],
|
||||||
"next_index": [next_index],
|
"next_index": [next_index],
|
||||||
"total_count": [total_count],
|
"total_count": [total_count],
|
||||||
"current_lora_name": [
|
"current_lora_name": [current_lora["file_name"]],
|
||||||
current_lora.get("model_name", current_lora["file_name"])
|
|
||||||
],
|
|
||||||
"current_lora_filename": [current_lora["file_name"]],
|
"current_lora_filename": [current_lora["file_name"]],
|
||||||
"next_lora_name": [next_display_name],
|
"next_lora_name": [next_display_name],
|
||||||
"next_lora_filename": [next_lora["file_name"]],
|
"next_lora_filename": [next_lora["file_name"]],
|
||||||
|
|||||||
@@ -8,6 +8,9 @@ from ..metadata_collector.metadata_processor import MetadataProcessor
|
|||||||
from ..metadata_collector import get_metadata
|
from ..metadata_collector import get_metadata
|
||||||
from PIL import Image, PngImagePlugin
|
from PIL import Image, PngImagePlugin
|
||||||
import piexif
|
import piexif
|
||||||
|
import logging
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
class SaveImageLM:
|
class SaveImageLM:
|
||||||
NAME = "Save Image (LoraManager)"
|
NAME = "Save Image (LoraManager)"
|
||||||
@@ -385,7 +388,7 @@ class SaveImageLM:
|
|||||||
exif_bytes = piexif.dump(exif_dict)
|
exif_bytes = piexif.dump(exif_dict)
|
||||||
save_kwargs["exif"] = exif_bytes
|
save_kwargs["exif"] = exif_bytes
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(f"Error adding EXIF data: {e}")
|
logger.error(f"Error adding EXIF data: {e}")
|
||||||
img.save(file_path, format="JPEG", **save_kwargs)
|
img.save(file_path, format="JPEG", **save_kwargs)
|
||||||
elif file_format == "webp":
|
elif file_format == "webp":
|
||||||
try:
|
try:
|
||||||
@@ -403,7 +406,7 @@ class SaveImageLM:
|
|||||||
exif_bytes = piexif.dump(exif_dict)
|
exif_bytes = piexif.dump(exif_dict)
|
||||||
save_kwargs["exif"] = exif_bytes
|
save_kwargs["exif"] = exif_bytes
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(f"Error adding EXIF data: {e}")
|
logger.error(f"Error adding EXIF data: {e}")
|
||||||
|
|
||||||
img.save(file_path, format="WEBP", **save_kwargs)
|
img.save(file_path, format="WEBP", **save_kwargs)
|
||||||
|
|
||||||
@@ -414,7 +417,7 @@ class SaveImageLM:
|
|||||||
})
|
})
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(f"Error saving image: {e}")
|
logger.error(f"Error saving image: {e}")
|
||||||
|
|
||||||
return results
|
return results
|
||||||
|
|
||||||
|
|||||||
@@ -60,6 +60,22 @@ class TriggerWordToggleLM:
|
|||||||
else:
|
else:
|
||||||
return data
|
return data
|
||||||
|
|
||||||
|
def _normalize_trigger_words(self, trigger_words):
|
||||||
|
"""Normalize trigger words by splitting by both single and double commas, stripping whitespace, and filtering empty strings"""
|
||||||
|
if not trigger_words or not isinstance(trigger_words, str):
|
||||||
|
return set()
|
||||||
|
|
||||||
|
# Split by double commas first to preserve groups, then by single commas
|
||||||
|
groups = re.split(r",{2,}", trigger_words)
|
||||||
|
words = []
|
||||||
|
for group in groups:
|
||||||
|
# Split each group by single comma
|
||||||
|
group_words = [word.strip() for word in group.split(",")]
|
||||||
|
words.extend(group_words)
|
||||||
|
|
||||||
|
# Filter out empty strings and return as set
|
||||||
|
return set(word for word in words if word)
|
||||||
|
|
||||||
def process_trigger_words(
|
def process_trigger_words(
|
||||||
self,
|
self,
|
||||||
id,
|
id,
|
||||||
@@ -81,7 +97,7 @@ class TriggerWordToggleLM:
|
|||||||
if (
|
if (
|
||||||
trigger_words_override
|
trigger_words_override
|
||||||
and isinstance(trigger_words_override, str)
|
and isinstance(trigger_words_override, str)
|
||||||
and trigger_words_override != trigger_words
|
and self._normalize_trigger_words(trigger_words_override) != self._normalize_trigger_words(trigger_words)
|
||||||
):
|
):
|
||||||
filtered_triggers = trigger_words_override
|
filtered_triggers = trigger_words_override
|
||||||
return (filtered_triggers,)
|
return (filtered_triggers,)
|
||||||
|
|||||||
@@ -30,6 +30,7 @@ ROUTE_DEFINITIONS: tuple[RouteDefinition, ...] = (
|
|||||||
RouteDefinition("POST", "/api/lm/force-download-example-images", "force_download_example_images"),
|
RouteDefinition("POST", "/api/lm/force-download-example-images", "force_download_example_images"),
|
||||||
RouteDefinition("POST", "/api/lm/cleanup-example-image-folders", "cleanup_example_image_folders"),
|
RouteDefinition("POST", "/api/lm/cleanup-example-image-folders", "cleanup_example_image_folders"),
|
||||||
RouteDefinition("POST", "/api/lm/example-images/set-nsfw-level", "set_example_image_nsfw_level"),
|
RouteDefinition("POST", "/api/lm/example-images/set-nsfw-level", "set_example_image_nsfw_level"),
|
||||||
|
RouteDefinition("POST", "/api/lm/check-example-images-needed", "check_example_images_needed"),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -92,6 +92,19 @@ class ExampleImagesDownloadHandler:
|
|||||||
except ExampleImagesDownloadError as exc:
|
except ExampleImagesDownloadError as exc:
|
||||||
return web.json_response({'success': False, 'error': str(exc)}, status=500)
|
return web.json_response({'success': False, 'error': str(exc)}, status=500)
|
||||||
|
|
||||||
|
async def check_example_images_needed(self, request: web.Request) -> web.StreamResponse:
|
||||||
|
"""Lightweight check to see if any models need example images downloaded."""
|
||||||
|
try:
|
||||||
|
payload = await request.json()
|
||||||
|
model_types = payload.get('model_types', ['lora', 'checkpoint', 'embedding'])
|
||||||
|
result = await self._download_manager.check_pending_models(model_types)
|
||||||
|
return web.json_response(result)
|
||||||
|
except Exception as exc:
|
||||||
|
return web.json_response(
|
||||||
|
{'success': False, 'error': str(exc)},
|
||||||
|
status=500
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class ExampleImagesManagementHandler:
|
class ExampleImagesManagementHandler:
|
||||||
"""HTTP adapters for import/delete endpoints."""
|
"""HTTP adapters for import/delete endpoints."""
|
||||||
@@ -161,6 +174,7 @@ class ExampleImagesHandlerSet:
|
|||||||
"resume_example_images": self.download.resume_example_images,
|
"resume_example_images": self.download.resume_example_images,
|
||||||
"stop_example_images": self.download.stop_example_images,
|
"stop_example_images": self.download.stop_example_images,
|
||||||
"force_download_example_images": self.download.force_download_example_images,
|
"force_download_example_images": self.download.force_download_example_images,
|
||||||
|
"check_example_images_needed": self.download.check_example_images_needed,
|
||||||
"import_example_images": self.management.import_example_images,
|
"import_example_images": self.management.import_example_images,
|
||||||
"delete_example_image": self.management.delete_example_image,
|
"delete_example_image": self.management.delete_example_image,
|
||||||
"set_example_image_nsfw_level": self.management.set_example_image_nsfw_level,
|
"set_example_image_nsfw_level": self.management.set_example_image_nsfw_level,
|
||||||
|
|||||||
@@ -6,6 +6,7 @@ import asyncio
|
|||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
|
import re
|
||||||
import time
|
import time
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from typing import Any, Awaitable, Callable, Dict, Iterable, List, Mapping, Optional
|
from typing import Any, Awaitable, Callable, Dict, Iterable, List, Mapping, Optional
|
||||||
@@ -269,6 +270,11 @@ class ModelListingHandler:
|
|||||||
request.query.get("update_available_only", "false").lower() == "true"
|
request.query.get("update_available_only", "false").lower() == "true"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Tag logic: "any" (OR) or "all" (AND) for include tags
|
||||||
|
tag_logic = request.query.get("tag_logic", "any").lower()
|
||||||
|
if tag_logic not in ("any", "all"):
|
||||||
|
tag_logic = "any"
|
||||||
|
|
||||||
# New license-based query filters
|
# New license-based query filters
|
||||||
credit_required = request.query.get("credit_required")
|
credit_required = request.query.get("credit_required")
|
||||||
if credit_required is not None:
|
if credit_required is not None:
|
||||||
@@ -297,6 +303,7 @@ class ModelListingHandler:
|
|||||||
"fuzzy_search": fuzzy_search,
|
"fuzzy_search": fuzzy_search,
|
||||||
"base_models": base_models,
|
"base_models": base_models,
|
||||||
"tags": tag_filters,
|
"tags": tag_filters,
|
||||||
|
"tag_logic": tag_logic,
|
||||||
"search_options": search_options,
|
"search_options": search_options,
|
||||||
"hash_filters": hash_filters,
|
"hash_filters": hash_filters,
|
||||||
"favorites_only": favorites_only,
|
"favorites_only": favorites_only,
|
||||||
@@ -755,19 +762,22 @@ class ModelQueryHandler:
|
|||||||
|
|
||||||
async def find_duplicate_models(self, request: web.Request) -> web.Response:
|
async def find_duplicate_models(self, request: web.Request) -> web.Response:
|
||||||
try:
|
try:
|
||||||
|
filters = self._parse_duplicate_filters(request)
|
||||||
duplicates = self._service.find_duplicate_hashes()
|
duplicates = self._service.find_duplicate_hashes()
|
||||||
result = []
|
result = []
|
||||||
cache = await self._service.scanner.get_cached_data()
|
cache = await self._service.scanner.get_cached_data()
|
||||||
|
|
||||||
for sha256, paths in duplicates.items():
|
for sha256, paths in duplicates.items():
|
||||||
group = {"hash": sha256, "models": []}
|
# Collect all models in this group
|
||||||
|
all_models = []
|
||||||
for path in paths:
|
for path in paths:
|
||||||
model = next(
|
model = next(
|
||||||
(m for m in cache.raw_data if m["file_path"] == path), None
|
(m for m in cache.raw_data if m["file_path"] == path), None
|
||||||
)
|
)
|
||||||
if model:
|
if model:
|
||||||
group["models"].append(
|
all_models.append(model)
|
||||||
await self._service.format_response(model)
|
|
||||||
)
|
# Include primary if not already in paths
|
||||||
primary_path = self._service.get_path_by_hash(sha256)
|
primary_path = self._service.get_path_by_hash(sha256)
|
||||||
if primary_path and primary_path not in paths:
|
if primary_path and primary_path not in paths:
|
||||||
primary_model = next(
|
primary_model = next(
|
||||||
@@ -775,11 +785,25 @@ class ModelQueryHandler:
|
|||||||
None,
|
None,
|
||||||
)
|
)
|
||||||
if primary_model:
|
if primary_model:
|
||||||
group["models"].insert(
|
all_models.insert(0, primary_model)
|
||||||
0, await self._service.format_response(primary_model)
|
|
||||||
)
|
# Apply filters
|
||||||
|
filtered = self._apply_duplicate_filters(all_models, filters)
|
||||||
|
|
||||||
|
# Sort: originals first, copies last
|
||||||
|
sorted_models = self._sort_duplicate_group(filtered)
|
||||||
|
|
||||||
|
# Format response
|
||||||
|
group = {"hash": sha256, "models": []}
|
||||||
|
for model in sorted_models:
|
||||||
|
group["models"].append(
|
||||||
|
await self._service.format_response(model)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Only include groups with 2+ models after filtering
|
||||||
if len(group["models"]) > 1:
|
if len(group["models"]) > 1:
|
||||||
result.append(group)
|
result.append(group)
|
||||||
|
|
||||||
return web.json_response(
|
return web.json_response(
|
||||||
{"success": True, "duplicates": result, "count": len(result)}
|
{"success": True, "duplicates": result, "count": len(result)}
|
||||||
)
|
)
|
||||||
@@ -792,6 +816,83 @@ class ModelQueryHandler:
|
|||||||
)
|
)
|
||||||
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||||
|
|
||||||
|
def _parse_duplicate_filters(self, request: web.Request) -> Dict[str, Any]:
|
||||||
|
"""Parse filter parameters from the request for duplicate finding."""
|
||||||
|
return {
|
||||||
|
"base_models": request.query.getall("base_model", []),
|
||||||
|
"tag_include": request.query.getall("tag_include", []),
|
||||||
|
"tag_exclude": request.query.getall("tag_exclude", []),
|
||||||
|
"model_types": request.query.getall("model_type", []),
|
||||||
|
"folder": request.query.get("folder"),
|
||||||
|
"favorites_only": request.query.get("favorites_only", "").lower() == "true",
|
||||||
|
}
|
||||||
|
|
||||||
|
def _apply_duplicate_filters(self, models: List[Dict[str, Any]], filters: Dict[str, Any]) -> List[Dict[str, Any]]:
|
||||||
|
"""Apply filters to a list of models within a duplicate group."""
|
||||||
|
result = models
|
||||||
|
|
||||||
|
# Apply base model filter
|
||||||
|
if filters.get("base_models"):
|
||||||
|
base_set = set(filters["base_models"])
|
||||||
|
result = [m for m in result if m.get("base_model") in base_set]
|
||||||
|
|
||||||
|
# Apply tag filters (include)
|
||||||
|
for tag in filters.get("tag_include", []):
|
||||||
|
if tag == "__no_tags__":
|
||||||
|
result = [m for m in result if not m.get("tags")]
|
||||||
|
else:
|
||||||
|
result = [m for m in result if tag in (m.get("tags") or [])]
|
||||||
|
|
||||||
|
# Apply tag filters (exclude)
|
||||||
|
for tag in filters.get("tag_exclude", []):
|
||||||
|
if tag == "__no_tags__":
|
||||||
|
result = [m for m in result if m.get("tags")]
|
||||||
|
else:
|
||||||
|
result = [m for m in result if tag not in (m.get("tags") or [])]
|
||||||
|
|
||||||
|
# Apply model type filter
|
||||||
|
if filters.get("model_types"):
|
||||||
|
type_set = {t.lower() for t in filters["model_types"]}
|
||||||
|
result = [
|
||||||
|
m for m in result if (m.get("model_type") or "").lower() in type_set
|
||||||
|
]
|
||||||
|
|
||||||
|
# Apply folder filter
|
||||||
|
if filters.get("folder"):
|
||||||
|
folder = filters["folder"]
|
||||||
|
result = [m for m in result if m.get("folder", "").startswith(folder)]
|
||||||
|
|
||||||
|
# Apply favorites filter
|
||||||
|
if filters.get("favorites_only"):
|
||||||
|
result = [m for m in result if m.get("favorite", False)]
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
def _sort_duplicate_group(self, models: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
|
||||||
|
"""Sort models: originals first (left), copies (with -????. pattern) last (right)."""
|
||||||
|
if len(models) <= 1:
|
||||||
|
return models
|
||||||
|
|
||||||
|
min_len = min(len(m.get("file_name", "")) for m in models)
|
||||||
|
|
||||||
|
def copy_score(m):
|
||||||
|
fn = m.get("file_name", "")
|
||||||
|
score = 0
|
||||||
|
# Match -0001.safetensors, -1234.safetensors etc.
|
||||||
|
if re.search(r"-\d{4}\.", fn):
|
||||||
|
score += 100
|
||||||
|
# Match (1), (2) etc.
|
||||||
|
if re.search(r"\(\d+\)", fn):
|
||||||
|
score += 50
|
||||||
|
# Match 'copy' in filename
|
||||||
|
if "copy" in fn.lower():
|
||||||
|
score += 50
|
||||||
|
# Longer filenames are more likely copies
|
||||||
|
score += len(fn) - min_len
|
||||||
|
return (score, fn.lower())
|
||||||
|
|
||||||
|
return sorted(models, key=copy_score)
|
||||||
|
|
||||||
async def find_filename_conflicts(self, request: web.Request) -> web.Response:
|
async def find_filename_conflicts(self, request: web.Request) -> web.Response:
|
||||||
try:
|
try:
|
||||||
duplicates = self._service.find_duplicate_filenames()
|
duplicates = self._service.find_duplicate_filenames()
|
||||||
|
|||||||
@@ -33,6 +33,10 @@ class PreviewHandler:
|
|||||||
raise web.HTTPBadRequest(text="Invalid preview path encoding") from exc
|
raise web.HTTPBadRequest(text="Invalid preview path encoding") from exc
|
||||||
|
|
||||||
normalized = decoded_path.replace("\\", "/")
|
normalized = decoded_path.replace("\\", "/")
|
||||||
|
|
||||||
|
if not self._config.is_preview_path_allowed(normalized):
|
||||||
|
raise web.HTTPForbidden(text="Preview path is not within an allowed directory")
|
||||||
|
|
||||||
candidate = Path(normalized)
|
candidate = Path(normalized)
|
||||||
try:
|
try:
|
||||||
resolved = candidate.expanduser().resolve(strict=False)
|
resolved = candidate.expanduser().resolve(strict=False)
|
||||||
@@ -40,12 +44,8 @@ class PreviewHandler:
|
|||||||
logger.debug("Failed to resolve preview path %s: %s", normalized, exc)
|
logger.debug("Failed to resolve preview path %s: %s", normalized, exc)
|
||||||
raise web.HTTPBadRequest(text="Unable to resolve preview path") from exc
|
raise web.HTTPBadRequest(text="Unable to resolve preview path") from exc
|
||||||
|
|
||||||
resolved_str = str(resolved)
|
|
||||||
if not self._config.is_preview_path_allowed(resolved_str):
|
|
||||||
raise web.HTTPForbidden(text="Preview path is not within an allowed directory")
|
|
||||||
|
|
||||||
if not resolved.is_file():
|
if not resolved.is_file():
|
||||||
logger.debug("Preview file not found at %s", resolved_str)
|
logger.debug("Preview file not found at %s", str(resolved))
|
||||||
raise web.HTTPNotFound(text="Preview file not found")
|
raise web.HTTPNotFound(text="Preview file not found")
|
||||||
|
|
||||||
# aiohttp's FileResponse handles range requests and content headers for us.
|
# aiohttp's FileResponse handles range requests and content headers for us.
|
||||||
|
|||||||
@@ -412,10 +412,11 @@ class RecipeQueryHandler:
|
|||||||
if recipe_scanner is None:
|
if recipe_scanner is None:
|
||||||
raise RuntimeError("Recipe scanner unavailable")
|
raise RuntimeError("Recipe scanner unavailable")
|
||||||
|
|
||||||
duplicate_groups = await recipe_scanner.find_all_duplicate_recipes()
|
fingerprint_groups = await recipe_scanner.find_all_duplicate_recipes()
|
||||||
|
url_groups = await recipe_scanner.find_duplicate_recipes_by_source()
|
||||||
response_data = []
|
response_data = []
|
||||||
|
|
||||||
for fingerprint, recipe_ids in duplicate_groups.items():
|
for fingerprint, recipe_ids in fingerprint_groups.items():
|
||||||
if len(recipe_ids) <= 1:
|
if len(recipe_ids) <= 1:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@@ -439,12 +440,44 @@ class RecipeQueryHandler:
|
|||||||
recipes.sort(key=lambda entry: entry.get("modified", 0), reverse=True)
|
recipes.sort(key=lambda entry: entry.get("modified", 0), reverse=True)
|
||||||
response_data.append(
|
response_data.append(
|
||||||
{
|
{
|
||||||
|
"type": "fingerprint",
|
||||||
"fingerprint": fingerprint,
|
"fingerprint": fingerprint,
|
||||||
"count": len(recipes),
|
"count": len(recipes),
|
||||||
"recipes": recipes,
|
"recipes": recipes,
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
for url, recipe_ids in url_groups.items():
|
||||||
|
if len(recipe_ids) <= 1:
|
||||||
|
continue
|
||||||
|
|
||||||
|
recipes = []
|
||||||
|
for recipe_id in recipe_ids:
|
||||||
|
recipe = await recipe_scanner.get_recipe_by_id(recipe_id)
|
||||||
|
if recipe:
|
||||||
|
recipes.append(
|
||||||
|
{
|
||||||
|
"id": recipe.get("id"),
|
||||||
|
"title": recipe.get("title"),
|
||||||
|
"file_url": recipe.get("file_url")
|
||||||
|
or self._format_recipe_file_url(recipe.get("file_path", "")),
|
||||||
|
"modified": recipe.get("modified"),
|
||||||
|
"created_date": recipe.get("created_date"),
|
||||||
|
"lora_count": len(recipe.get("loras", [])),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
if len(recipes) >= 2:
|
||||||
|
recipes.sort(key=lambda entry: entry.get("modified", 0), reverse=True)
|
||||||
|
response_data.append(
|
||||||
|
{
|
||||||
|
"type": "source_url",
|
||||||
|
"fingerprint": url,
|
||||||
|
"count": len(recipes),
|
||||||
|
"recipes": recipes,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
response_data.sort(key=lambda entry: entry["count"], reverse=True)
|
response_data.sort(key=lambda entry: entry["count"], reverse=True)
|
||||||
return web.json_response({"success": True, "duplicate_groups": response_data})
|
return web.json_response({"success": True, "duplicate_groups": response_data})
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
@@ -1021,7 +1054,7 @@ class RecipeManagementHandler:
|
|||||||
"exclude": False,
|
"exclude": False,
|
||||||
}
|
}
|
||||||
|
|
||||||
async def _download_remote_media(self, image_url: str) -> tuple[bytes, str]:
|
async def _download_remote_media(self, image_url: str) -> tuple[bytes, str, Any]:
|
||||||
civitai_client = self._civitai_client_getter()
|
civitai_client = self._civitai_client_getter()
|
||||||
downloader = await self._downloader_factory()
|
downloader = await self._downloader_factory()
|
||||||
temp_path = None
|
temp_path = None
|
||||||
@@ -1029,6 +1062,7 @@ class RecipeManagementHandler:
|
|||||||
with tempfile.NamedTemporaryFile(delete=False) as temp_file:
|
with tempfile.NamedTemporaryFile(delete=False) as temp_file:
|
||||||
temp_path = temp_file.name
|
temp_path = temp_file.name
|
||||||
download_url = image_url
|
download_url = image_url
|
||||||
|
image_info = None
|
||||||
civitai_match = re.match(r"https://civitai\.com/images/(\d+)", image_url)
|
civitai_match = re.match(r"https://civitai\.com/images/(\d+)", image_url)
|
||||||
if civitai_match:
|
if civitai_match:
|
||||||
if civitai_client is None:
|
if civitai_client is None:
|
||||||
|
|||||||
@@ -81,6 +81,7 @@ class BaseModelService(ABC):
|
|||||||
update_available_only: bool = False,
|
update_available_only: bool = False,
|
||||||
credit_required: Optional[bool] = None,
|
credit_required: Optional[bool] = None,
|
||||||
allow_selling_generated_content: Optional[bool] = None,
|
allow_selling_generated_content: Optional[bool] = None,
|
||||||
|
tag_logic: str = "any",
|
||||||
**kwargs,
|
**kwargs,
|
||||||
) -> Dict:
|
) -> Dict:
|
||||||
"""Get paginated and filtered model data"""
|
"""Get paginated and filtered model data"""
|
||||||
@@ -109,6 +110,7 @@ class BaseModelService(ABC):
|
|||||||
tags=tags,
|
tags=tags,
|
||||||
favorites_only=favorites_only,
|
favorites_only=favorites_only,
|
||||||
search_options=search_options,
|
search_options=search_options,
|
||||||
|
tag_logic=tag_logic,
|
||||||
)
|
)
|
||||||
|
|
||||||
if search:
|
if search:
|
||||||
@@ -241,6 +243,7 @@ class BaseModelService(ABC):
|
|||||||
tags: Optional[Dict[str, str]] = None,
|
tags: Optional[Dict[str, str]] = None,
|
||||||
favorites_only: bool = False,
|
favorites_only: bool = False,
|
||||||
search_options: dict = None,
|
search_options: dict = None,
|
||||||
|
tag_logic: str = "any",
|
||||||
) -> List[Dict]:
|
) -> List[Dict]:
|
||||||
"""Apply common filters that work across all model types"""
|
"""Apply common filters that work across all model types"""
|
||||||
normalized_options = self.search_strategy.normalize_options(search_options)
|
normalized_options = self.search_strategy.normalize_options(search_options)
|
||||||
@@ -253,6 +256,7 @@ class BaseModelService(ABC):
|
|||||||
tags=tags,
|
tags=tags,
|
||||||
favorites_only=favorites_only,
|
favorites_only=favorites_only,
|
||||||
search_options=normalized_options,
|
search_options=normalized_options,
|
||||||
|
tag_logic=tag_logic,
|
||||||
)
|
)
|
||||||
return self.filter_set.apply(data, criteria)
|
return self.filter_set.apply(data, criteria)
|
||||||
|
|
||||||
|
|||||||
259
py/services/cache_entry_validator.py
Normal file
259
py/services/cache_entry_validator.py
Normal file
@@ -0,0 +1,259 @@
|
|||||||
|
"""
|
||||||
|
Cache Entry Validator
|
||||||
|
|
||||||
|
Validates and repairs cache entries to prevent runtime errors from
|
||||||
|
missing or invalid critical fields.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from typing import Any, Dict, List, Optional, Tuple
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class ValidationResult:
|
||||||
|
"""Result of validating a single cache entry."""
|
||||||
|
is_valid: bool
|
||||||
|
repaired: bool
|
||||||
|
errors: List[str] = field(default_factory=list)
|
||||||
|
entry: Optional[Dict[str, Any]] = None
|
||||||
|
|
||||||
|
|
||||||
|
class CacheEntryValidator:
    """
    Validates and repairs the core fields of a cache entry.

    Fields are grouped by the severity of the failures they cause
    downstream when missing or malformed:

    Critical (runtime errors):
    - file_path: KeyError in multiple locations
    - sha256: KeyError/AttributeError in hash operations

    Medium (sorting/display issues):
    - size, modified: KeyError during sorting
    - model_name: AttributeError on .lower() calls

    Low:
    - tags: KeyError/TypeError in recipe operations
    """

    # Maps each core field name to (default_value, is_required).
    CORE_FIELDS: Dict[str, Tuple[Any, bool]] = {
        'file_path': ('', True),
        'sha256': ('', True),
        'file_name': ('', False),
        'model_name': ('', False),
        'folder': ('', False),
        'size': (0, False),
        'modified': (0.0, False),
        'tags': ([], False),
        'preview_url': ('', False),
        'base_model': ('', False),
        'from_civitai': (True, False),
        'favorite': (False, False),
        'exclude': (False, False),
        'db_checked': (False, False),
        'preview_nsfw_level': (0, False),
        'notes': ('', False),
        'usage_tips': ('', False),
    }

    @classmethod
    def validate(cls, entry: Dict[str, Any], *, auto_repair: bool = True) -> ValidationResult:
        """
        Validate a single cache entry.

        Args:
            entry: The cache entry dictionary to validate
            auto_repair: If True, attempt to repair missing/invalid fields
                (repairs happen on a copy; the caller's dict is not mutated)

        Returns:
            ValidationResult with validation status and optionally repaired entry
        """
        if entry is None:
            return ValidationResult(
                is_valid=False,
                repaired=False,
                errors=['Entry is None'],
                entry=None,
            )

        if not isinstance(entry, dict):
            return ValidationResult(
                is_valid=False,
                repaired=False,
                errors=[f'Entry is not a dict: {type(entry).__name__}'],
                entry=None,
            )

        problems: List[str] = []
        did_repair = False
        # Work on a copy when repairing so the input dict stays untouched.
        candidate = dict(entry) if auto_repair else entry

        for name, (default, required) in cls.CORE_FIELDS.items():
            current = candidate.get(name)

            if current is None:
                # Only missing *required* fields are reported as errors,
                # but every missing field is filled in when repairing.
                if required:
                    problems.append(f"Required field '{name}' is missing or None")
                if auto_repair:
                    candidate[name] = cls._get_default_copy(default)
                    did_repair = True
                continue

            issue = cls._validate_field(name, current, default)
            if issue:
                problems.append(issue)
                if auto_repair:
                    candidate[name] = cls._get_default_copy(default)
                    did_repair = True

        # A blank critical field cannot be repaired — there is nothing
        # sensible to substitute for a real path or hash — so reject.
        for name in ('file_path', 'sha256'):
            current = candidate.get(name, '')
            if not current or (isinstance(current, str) and not current.strip()):
                problems.append(f"Required field '{name}' is empty")
                return ValidationResult(
                    is_valid=False,
                    repaired=did_repair,
                    errors=problems,
                    entry=candidate if auto_repair else None,
                )

        # Canonicalize the hash so later lookups are case-insensitive.
        digest = candidate.get('sha256', '')
        if isinstance(digest, str):
            canonical = digest.lower().strip()
            if canonical != digest:
                candidate['sha256'] = canonical
                did_repair = True

        # The entry is valid unless a critical required field
        # (file_path / sha256) is still reported missing.
        critical_names = {'file_path', 'sha256'}
        blocking = [
            msg for msg in problems
            if "Required field" in msg
            and any(f"'{name}'" in msg for name in critical_names)
        ]

        return ValidationResult(
            is_valid=not blocking,
            repaired=did_repair,
            errors=problems,
            entry=candidate if auto_repair else entry,
        )

    @classmethod
    def validate_batch(
        cls,
        entries: List[Dict[str, Any]],
        *,
        auto_repair: bool = True
    ) -> Tuple[List[Dict[str, Any]], List[Dict[str, Any]]]:
        """
        Validate a batch of cache entries.

        Args:
            entries: List of cache entry dictionaries to validate
            auto_repair: If True, attempt to repair missing/invalid fields

        Returns:
            Tuple of (valid_entries, invalid_entries)
        """
        if not entries:
            return [], []

        accepted: List[Dict[str, Any]] = []
        rejected: List[Dict[str, Any]] = []

        for candidate in entries:
            outcome = cls.validate(candidate, auto_repair=auto_repair)

            if not outcome.is_valid:
                rejected.append(candidate)
                # Log rejected entries so corrupt data can be traced back.
                path = (
                    candidate.get('file_path', '<unknown>')
                    if isinstance(candidate, dict) else '<not a dict>'
                )
                logger.warning(
                    f"Invalid cache entry for '{path}': {', '.join(outcome.errors)}"
                )
            else:
                # Prefer the repaired copy when one was produced.
                accepted.append(outcome.entry if outcome.entry else candidate)

        return accepted, rejected

    @classmethod
    def _validate_field(cls, field_name: str, value: Any, default_value: Any) -> Optional[str]:
        """
        Validate a single field value against its default's type.

        Returns an error message if invalid, None if valid.
        """
        kind = type(default_value)

        # int and float defaults both just require a numeric value.
        if kind is int or kind is float:
            if not isinstance(value, (int, float)):
                return f"Field '{field_name}' should be numeric, got {type(value).__name__}"
        elif kind is str:
            if not isinstance(value, str):
                return f"Field '{field_name}' should be string, got {type(value).__name__}"
        elif kind is list:
            if not isinstance(value, (list, tuple)):
                return f"Field '{field_name}' should be list, got {type(value).__name__}"
        # Boolean fields are deliberately lenient: any truthy/falsy value
        # is accepted as-is.

        return None

    @classmethod
    def _get_default_copy(cls, default_value: Any) -> Any:
        """Get a copy of the default value to avoid shared mutable state."""
        for container in (list, dict):
            if isinstance(default_value, container):
                return container(default_value)
        return default_value

    @classmethod
    def get_file_path_safe(cls, entry: Dict[str, Any], default: str = '') -> str:
        """Safely get file_path from an entry."""
        raw = entry.get('file_path') if isinstance(entry, dict) else None
        return raw if isinstance(raw, str) else default

    @classmethod
    def get_sha256_safe(cls, entry: Dict[str, Any], default: str = '') -> str:
        """Safely get sha256 from an entry, lowercased."""
        raw = entry.get('sha256') if isinstance(entry, dict) else None
        return raw.lower() if isinstance(raw, str) else default
|
||||||
201
py/services/cache_health_monitor.py
Normal file
201
py/services/cache_health_monitor.py
Normal file
@@ -0,0 +1,201 @@
|
|||||||
|
"""
|
||||||
|
Cache Health Monitor
|
||||||
|
|
||||||
|
Monitors cache health status and determines when user intervention is needed.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from enum import Enum
|
||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from .cache_entry_validator import CacheEntryValidator, ValidationResult
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class CacheHealthStatus(Enum):
    """Health status of the cache."""
    # No invalid entries were found.
    HEALTHY = "healthy"
    # Some invalid entries were found (and auto-repaired where possible);
    # the user should consider rebuilding the cache.
    DEGRADED = "degraded"
    # The share of invalid entries crossed the corruption threshold;
    # significant data loss is likely and a rebuild is recommended.
    CORRUPTED = "corrupted"
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class HealthReport:
    """
    Report of a cache health check.

    Aggregates validation results: the overall status plus counts of
    valid / invalid / repaired entries and the offending file paths.
    """
    status: CacheHealthStatus
    total_entries: int
    valid_entries: int
    invalid_entries: int
    repaired_entries: int
    invalid_paths: List[str] = field(default_factory=list)
    message: str = ""

    @property
    def corruption_rate(self) -> float:
        """Fraction of entries that failed validation (0.0 for an empty cache)."""
        total = self.total_entries
        return self.invalid_entries / total if total > 0 else 0.0

    def to_dict(self) -> Dict[str, Any]:
        """Convert to a dictionary for JSON serialization."""
        payload: Dict[str, Any] = {
            'status': self.status.value,
            'total_entries': self.total_entries,
            'valid_entries': self.valid_entries,
            'invalid_entries': self.invalid_entries,
            'repaired_entries': self.repaired_entries,
            'corruption_rate': f"{self.corruption_rate:.1%}",
            'invalid_paths': self.invalid_paths[:10],  # Limit to first 10
            'message': self.message,
        }
        return payload
|
||||||
|
|
||||||
|
|
||||||
|
class CacheHealthMonitor:
    """
    Monitors cache health and determines the appropriate status.

    Status mapping (as implemented):
    - HEALTHY: no invalid entries
    - DEGRADED: some invalid entries, below the corrupted threshold
      (auto-repaired where possible; user should consider rebuilding)
    - CORRUPTED: corruption rate at or above the corrupted threshold
      (significant data loss likely; rebuild recommended)
    """

    # Threshold percentages
    DEGRADED_THRESHOLD = 0.01  # 1% - show warning
    CORRUPTED_THRESHOLD = 0.05  # 5% - critical warning

    def __init__(
        self,
        *,
        degraded_threshold: float = DEGRADED_THRESHOLD,
        corrupted_threshold: float = CORRUPTED_THRESHOLD
    ):
        """
        Initialize the health monitor.

        Args:
            degraded_threshold: Corruption rate threshold for DEGRADED status
            corrupted_threshold: Corruption rate threshold for CORRUPTED status
        """
        self.degraded_threshold = degraded_threshold
        self.corrupted_threshold = corrupted_threshold

    def check_health(
        self,
        entries: List[Dict[str, Any]],
        *,
        auto_repair: bool = True
    ) -> HealthReport:
        """
        Check the health of cache entries.

        Args:
            entries: List of cache entry dictionaries to check
            auto_repair: If True, attempt to repair entries during validation

        Returns:
            HealthReport with status and statistics
        """
        if not entries:
            return HealthReport(
                status=CacheHealthStatus.HEALTHY,
                total_entries=0,
                valid_entries=0,
                invalid_entries=0,
                repaired_entries=0,
                message="Cache is empty"
            )

        total = len(entries)
        ok_entries: List[Dict[str, Any]] = []
        bad_entries: List[Dict[str, Any]] = []
        bad_paths: List[str] = []
        repaired_total = 0

        for candidate in entries:
            outcome = CacheEntryValidator.validate(candidate, auto_repair=auto_repair)

            if not outcome.is_valid:
                bad_entries.append(candidate)
                # Record the path (if recoverable) for the report.
                bad_paths.append(
                    CacheEntryValidator.get_file_path_safe(candidate, '<unknown>')
                )
                continue

            ok_entries.append(outcome.entry if outcome.entry else candidate)
            if outcome.repaired:
                repaired_total += 1

        bad_count = len(bad_entries)
        rate = bad_count / total if total > 0 else 0.0

        if bad_count == 0:
            status = CacheHealthStatus.HEALTHY
            message = "Cache is healthy"
        elif rate >= self.corrupted_threshold:
            status = CacheHealthStatus.CORRUPTED
            message = (
                f"Cache is corrupted: {bad_count} invalid entries "
                f"({rate:.1%}). Rebuild recommended."
            )
        else:
            # Any invalid entries below the corrupted threshold mean
            # DEGRADED (equivalent to the previous `rate >= degraded or
            # invalid_count > 0` condition, since bad_count > 0 here).
            status = CacheHealthStatus.DEGRADED
            message = (
                f"Cache has {bad_count} invalid entries "
                f"({rate:.1%}). Consider rebuilding cache."
            )

        # Surface non-healthy results in the log for debugging.
        if status != CacheHealthStatus.HEALTHY:
            logger.warning(
                f"Cache health check: {status.value} - "
                f"{bad_count}/{total} invalid, "
                f"{repaired_total} repaired"
            )
            if bad_paths:
                logger.debug(f"Invalid entry paths: {bad_paths[:5]}")

        return HealthReport(
            status=status,
            total_entries=total,
            valid_entries=len(ok_entries),
            invalid_entries=bad_count,
            repaired_entries=repaired_total,
            invalid_paths=bad_paths,
            message=message
        )

    def should_notify_user(self, report: HealthReport) -> bool:
        """
        Determine if the user should be notified about cache health.

        Args:
            report: The health report to evaluate

        Returns:
            True if user should be notified
        """
        return report.status is not CacheHealthStatus.HEALTHY

    def get_notification_severity(self, report: HealthReport) -> str:
        """
        Get the severity level for user notification.

        Args:
            report: The health report to evaluate

        Returns:
            Severity string: 'warning' or 'error'
        """
        return 'error' if report.status == CacheHealthStatus.CORRUPTED else 'warning'
|
||||||
@@ -30,36 +30,36 @@ class LoraScanner(ModelScanner):
|
|||||||
|
|
||||||
async def diagnose_hash_index(self):
|
async def diagnose_hash_index(self):
|
||||||
"""Diagnostic method to verify hash index functionality"""
|
"""Diagnostic method to verify hash index functionality"""
|
||||||
print("\n\n*** DIAGNOSING LORA HASH INDEX ***\n\n", file=sys.stderr)
|
logger.debug("\n\n*** DIAGNOSING LORA HASH INDEX ***\n\n")
|
||||||
|
|
||||||
# First check if the hash index has any entries
|
# First check if the hash index has any entries
|
||||||
if hasattr(self, '_hash_index'):
|
if hasattr(self, '_hash_index'):
|
||||||
index_entries = len(self._hash_index._hash_to_path)
|
index_entries = len(self._hash_index._hash_to_path)
|
||||||
print(f"Hash index has {index_entries} entries", file=sys.stderr)
|
logger.debug(f"Hash index has {index_entries} entries")
|
||||||
|
|
||||||
# Print a few example entries if available
|
# Print a few example entries if available
|
||||||
if index_entries > 0:
|
if index_entries > 0:
|
||||||
print("\nSample hash index entries:", file=sys.stderr)
|
logger.debug("\nSample hash index entries:")
|
||||||
count = 0
|
count = 0
|
||||||
for hash_val, path in self._hash_index._hash_to_path.items():
|
for hash_val, path in self._hash_index._hash_to_path.items():
|
||||||
if count < 5: # Just show the first 5
|
if count < 5: # Just show the first 5
|
||||||
print(f"Hash: {hash_val[:8]}... -> Path: {path}", file=sys.stderr)
|
logger.debug(f"Hash: {hash_val[:8]}... -> Path: {path}")
|
||||||
count += 1
|
count += 1
|
||||||
else:
|
else:
|
||||||
break
|
break
|
||||||
else:
|
else:
|
||||||
print("Hash index not initialized", file=sys.stderr)
|
logger.debug("Hash index not initialized")
|
||||||
|
|
||||||
# Try looking up by a known hash for testing
|
# Try looking up by a known hash for testing
|
||||||
if not hasattr(self, '_hash_index') or not self._hash_index._hash_to_path:
|
if not hasattr(self, '_hash_index') or not self._hash_index._hash_to_path:
|
||||||
print("No hash entries to test lookup with", file=sys.stderr)
|
logger.debug("No hash entries to test lookup with")
|
||||||
return
|
return
|
||||||
|
|
||||||
test_hash = next(iter(self._hash_index._hash_to_path.keys()))
|
test_hash = next(iter(self._hash_index._hash_to_path.keys()))
|
||||||
test_path = self._hash_index.get_path(test_hash)
|
test_path = self._hash_index.get_path(test_hash)
|
||||||
print(f"\nTest lookup by hash: {test_hash[:8]}... -> {test_path}", file=sys.stderr)
|
logger.debug(f"\nTest lookup by hash: {test_hash[:8]}... -> {test_path}")
|
||||||
|
|
||||||
# Also test reverse lookup
|
# Also test reverse lookup
|
||||||
test_hash_result = self._hash_index.get_hash(test_path)
|
test_hash_result = self._hash_index.get_hash(test_path)
|
||||||
print(f"Test reverse lookup: {test_path} -> {test_hash_result[:8]}...\n\n", file=sys.stderr)
|
logger.debug(f"Test reverse lookup: {test_path} -> {test_hash_result[:8]}...\n\n")
|
||||||
|
|
||||||
|
|||||||
@@ -44,6 +44,8 @@ async def initialize_metadata_providers():
|
|||||||
logger.debug(f"SQLite metadata provider registered with database: {db_path}")
|
logger.debug(f"SQLite metadata provider registered with database: {db_path}")
|
||||||
else:
|
else:
|
||||||
logger.warning("Metadata archive database is enabled but database file not found")
|
logger.warning("Metadata archive database is enabled but database file not found")
|
||||||
|
logger.info("Automatically disabling enable_metadata_archive_db setting")
|
||||||
|
settings_manager.set('enable_metadata_archive_db', False)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Failed to initialize SQLite metadata provider: {e}")
|
logger.error(f"Failed to initialize SQLite metadata provider: {e}")
|
||||||
|
|
||||||
|
|||||||
@@ -243,17 +243,27 @@ class MetadataSyncService:
|
|||||||
last_error = error or last_error
|
last_error = error or last_error
|
||||||
|
|
||||||
if civitai_metadata is None or metadata_provider is None:
|
if civitai_metadata is None or metadata_provider is None:
|
||||||
|
# Track if we need to save metadata
|
||||||
|
needs_save = False
|
||||||
|
|
||||||
if sqlite_attempted:
|
if sqlite_attempted:
|
||||||
model_data["db_checked"] = True
|
model_data["db_checked"] = True
|
||||||
|
needs_save = True
|
||||||
|
|
||||||
if civitai_api_not_found:
|
if civitai_api_not_found:
|
||||||
model_data["from_civitai"] = False
|
model_data["from_civitai"] = False
|
||||||
model_data["civitai_deleted"] = True
|
model_data["civitai_deleted"] = True
|
||||||
model_data["db_checked"] = sqlite_attempted or (enable_archive and model_data.get("db_checked", False))
|
model_data["db_checked"] = sqlite_attempted or (enable_archive and model_data.get("db_checked", False))
|
||||||
model_data["last_checked_at"] = datetime.now().timestamp()
|
model_data["last_checked_at"] = datetime.now().timestamp()
|
||||||
|
needs_save = True
|
||||||
|
|
||||||
|
# Save metadata if any state was updated
|
||||||
|
if needs_save:
|
||||||
data_to_save = model_data.copy()
|
data_to_save = model_data.copy()
|
||||||
data_to_save.pop("folder", None)
|
data_to_save.pop("folder", None)
|
||||||
|
# Update last_checked_at for sqlite-only attempts if not already set
|
||||||
|
if "last_checked_at" not in data_to_save:
|
||||||
|
data_to_save["last_checked_at"] = datetime.now().timestamp()
|
||||||
await self._metadata_manager.save_metadata(file_path, data_to_save)
|
await self._metadata_manager.save_metadata(file_path, data_to_save)
|
||||||
|
|
||||||
default_error = (
|
default_error = (
|
||||||
|
|||||||
@@ -5,7 +5,6 @@ import logging
|
|||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
from typing import Any, Dict, List, Optional, Tuple
|
from typing import Any, Dict, List, Optional, Tuple
|
||||||
from dataclasses import dataclass, field
|
from dataclasses import dataclass, field
|
||||||
from operator import itemgetter
|
|
||||||
from natsort import natsorted
|
from natsort import natsorted
|
||||||
|
|
||||||
# Supported sort modes: (sort_key, order)
|
# Supported sort modes: (sort_key, order)
|
||||||
@@ -229,17 +228,17 @@ class ModelCache:
|
|||||||
reverse=reverse
|
reverse=reverse
|
||||||
)
|
)
|
||||||
elif sort_key == 'date':
|
elif sort_key == 'date':
|
||||||
# Sort by modified timestamp
|
# Sort by modified timestamp (use .get() with default to handle missing fields)
|
||||||
result = sorted(
|
result = sorted(
|
||||||
data,
|
data,
|
||||||
key=itemgetter('modified'),
|
key=lambda x: x.get('modified', 0.0),
|
||||||
reverse=reverse
|
reverse=reverse
|
||||||
)
|
)
|
||||||
elif sort_key == 'size':
|
elif sort_key == 'size':
|
||||||
# Sort by file size
|
# Sort by file size (use .get() with default to handle missing fields)
|
||||||
result = sorted(
|
result = sorted(
|
||||||
data,
|
data,
|
||||||
key=itemgetter('size'),
|
key=lambda x: x.get('size', 0),
|
||||||
reverse=reverse
|
reverse=reverse
|
||||||
)
|
)
|
||||||
elif sort_key == 'usage':
|
elif sort_key == 'usage':
|
||||||
|
|||||||
@@ -676,10 +676,12 @@ class ModelMetadataProviderManager:
|
|||||||
|
|
||||||
def _get_provider(self, provider_name: str = None) -> ModelMetadataProvider:
|
def _get_provider(self, provider_name: str = None) -> ModelMetadataProvider:
|
||||||
"""Get provider by name or default provider"""
|
"""Get provider by name or default provider"""
|
||||||
if provider_name and provider_name in self.providers:
|
if provider_name:
|
||||||
|
if provider_name not in self.providers:
|
||||||
|
raise ValueError(f"Provider '{provider_name}' is not registered")
|
||||||
return self.providers[provider_name]
|
return self.providers[provider_name]
|
||||||
|
|
||||||
if self.default_provider is None:
|
if self.default_provider is None:
|
||||||
raise ValueError("No default provider set and no valid provider specified")
|
raise ValueError("No default provider set and no valid provider specified")
|
||||||
|
|
||||||
return self.providers[self.default_provider]
|
return self.providers[self.default_provider]
|
||||||
|
|||||||
@@ -99,6 +99,7 @@ class FilterCriteria:
|
|||||||
favorites_only: bool = False
|
favorites_only: bool = False
|
||||||
search_options: Optional[Dict[str, Any]] = None
|
search_options: Optional[Dict[str, Any]] = None
|
||||||
model_types: Optional[Sequence[str]] = None
|
model_types: Optional[Sequence[str]] = None
|
||||||
|
tag_logic: str = "any" # "any" (OR) or "all" (AND)
|
||||||
|
|
||||||
|
|
||||||
class ModelCacheRepository:
|
class ModelCacheRepository:
|
||||||
@@ -300,11 +301,29 @@ class ModelFilterSet:
|
|||||||
include_tags = {tag for tag in tag_filters if tag}
|
include_tags = {tag for tag in tag_filters if tag}
|
||||||
|
|
||||||
if include_tags:
|
if include_tags:
|
||||||
|
tag_logic = criteria.tag_logic.lower() if criteria.tag_logic else "any"
|
||||||
|
|
||||||
def matches_include(item_tags):
|
def matches_include(item_tags):
|
||||||
if not item_tags and "__no_tags__" in include_tags:
|
if not item_tags and "__no_tags__" in include_tags:
|
||||||
return True
|
return True
|
||||||
return any(tag in include_tags for tag in (item_tags or []))
|
if tag_logic == "all":
|
||||||
|
# AND logic: item must have ALL include tags
|
||||||
|
# Special case: __no_tags__ is handled separately
|
||||||
|
non_special_tags = include_tags - {"__no_tags__"}
|
||||||
|
if "__no_tags__" in include_tags:
|
||||||
|
# If __no_tags__ is selected along with other tags,
|
||||||
|
# treat it as "no tags OR (all other tags)"
|
||||||
|
if not item_tags:
|
||||||
|
return True
|
||||||
|
# Otherwise, check if all non-special tags match
|
||||||
|
if non_special_tags:
|
||||||
|
return all(tag in (item_tags or []) for tag in non_special_tags)
|
||||||
|
return True
|
||||||
|
# Normal case: all tags must match
|
||||||
|
return all(tag in (item_tags or []) for tag in non_special_tags)
|
||||||
|
else:
|
||||||
|
# OR logic (default): item must have ANY include tag
|
||||||
|
return any(tag in include_tags for tag in (item_tags or []))
|
||||||
|
|
||||||
items = [item for item in items if matches_include(item.get("tags"))]
|
items = [item for item in items if matches_include(item.get("tags"))]
|
||||||
|
|
||||||
|
|||||||
@@ -20,6 +20,8 @@ from .service_registry import ServiceRegistry
|
|||||||
from .websocket_manager import ws_manager
|
from .websocket_manager import ws_manager
|
||||||
from .persistent_model_cache import get_persistent_cache
|
from .persistent_model_cache import get_persistent_cache
|
||||||
from .settings_manager import get_settings_manager
|
from .settings_manager import get_settings_manager
|
||||||
|
from .cache_entry_validator import CacheEntryValidator
|
||||||
|
from .cache_health_monitor import CacheHealthMonitor, CacheHealthStatus
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -468,6 +470,39 @@ class ModelScanner:
|
|||||||
for tag in adjusted_item.get('tags') or []:
|
for tag in adjusted_item.get('tags') or []:
|
||||||
tags_count[tag] = tags_count.get(tag, 0) + 1
|
tags_count[tag] = tags_count.get(tag, 0) + 1
|
||||||
|
|
||||||
|
# Validate cache entries and check health
|
||||||
|
valid_entries, invalid_entries = CacheEntryValidator.validate_batch(
|
||||||
|
adjusted_raw_data, auto_repair=True
|
||||||
|
)
|
||||||
|
|
||||||
|
if invalid_entries:
|
||||||
|
monitor = CacheHealthMonitor()
|
||||||
|
report = monitor.check_health(adjusted_raw_data, auto_repair=True)
|
||||||
|
|
||||||
|
if report.status != CacheHealthStatus.HEALTHY:
|
||||||
|
# Broadcast health warning to frontend
|
||||||
|
await ws_manager.broadcast_cache_health_warning(report, page_type)
|
||||||
|
logger.warning(
|
||||||
|
f"{self.model_type.capitalize()} Scanner: Cache health issue detected - "
|
||||||
|
f"{report.invalid_entries} invalid entries, {report.repaired_entries} repaired"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Use only valid entries
|
||||||
|
adjusted_raw_data = valid_entries
|
||||||
|
|
||||||
|
# Rebuild tags count from valid entries only
|
||||||
|
tags_count = {}
|
||||||
|
for item in adjusted_raw_data:
|
||||||
|
for tag in item.get('tags') or []:
|
||||||
|
tags_count[tag] = tags_count.get(tag, 0) + 1
|
||||||
|
|
||||||
|
# Remove invalid entries from hash index
|
||||||
|
for invalid_entry in invalid_entries:
|
||||||
|
file_path = CacheEntryValidator.get_file_path_safe(invalid_entry)
|
||||||
|
sha256 = CacheEntryValidator.get_sha256_safe(invalid_entry)
|
||||||
|
if file_path:
|
||||||
|
hash_index.remove_by_path(file_path, sha256)
|
||||||
|
|
||||||
scan_result = CacheBuildResult(
|
scan_result = CacheBuildResult(
|
||||||
raw_data=adjusted_raw_data,
|
raw_data=adjusted_raw_data,
|
||||||
hash_index=hash_index,
|
hash_index=hash_index,
|
||||||
@@ -651,7 +686,6 @@ class ModelScanner:
|
|||||||
|
|
||||||
async def _initialize_cache(self) -> None:
|
async def _initialize_cache(self) -> None:
|
||||||
"""Initialize or refresh the cache"""
|
"""Initialize or refresh the cache"""
|
||||||
print("init start", flush=True)
|
|
||||||
self._is_initializing = True # Set flag
|
self._is_initializing = True # Set flag
|
||||||
try:
|
try:
|
||||||
start_time = time.time()
|
start_time = time.time()
|
||||||
@@ -665,7 +699,6 @@ class ModelScanner:
|
|||||||
scan_result = await self._gather_model_data()
|
scan_result = await self._gather_model_data()
|
||||||
await self._apply_scan_result(scan_result)
|
await self._apply_scan_result(scan_result)
|
||||||
await self._save_persistent_cache(scan_result)
|
await self._save_persistent_cache(scan_result)
|
||||||
print("init end", flush=True)
|
|
||||||
|
|
||||||
logger.info(
|
logger.info(
|
||||||
f"{self.model_type.capitalize()} Scanner: Cache initialization completed in {time.time() - start_time:.2f} seconds, "
|
f"{self.model_type.capitalize()} Scanner: Cache initialization completed in {time.time() - start_time:.2f} seconds, "
|
||||||
@@ -776,6 +809,18 @@ class ModelScanner:
|
|||||||
model_data = self.adjust_cached_entry(dict(model_data))
|
model_data = self.adjust_cached_entry(dict(model_data))
|
||||||
if not model_data:
|
if not model_data:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
# Validate the new entry before adding
|
||||||
|
validation_result = CacheEntryValidator.validate(
|
||||||
|
model_data, auto_repair=True
|
||||||
|
)
|
||||||
|
if not validation_result.is_valid:
|
||||||
|
logger.warning(
|
||||||
|
f"Skipping invalid entry during reconcile: {path}"
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
model_data = validation_result.entry
|
||||||
|
|
||||||
self._ensure_license_flags(model_data)
|
self._ensure_license_flags(model_data)
|
||||||
# Add to cache
|
# Add to cache
|
||||||
self._cache.raw_data.append(model_data)
|
self._cache.raw_data.append(model_data)
|
||||||
@@ -1090,6 +1135,17 @@ class ModelScanner:
|
|||||||
processed_files += 1
|
processed_files += 1
|
||||||
|
|
||||||
if result:
|
if result:
|
||||||
|
# Validate the entry before adding
|
||||||
|
validation_result = CacheEntryValidator.validate(
|
||||||
|
result, auto_repair=True
|
||||||
|
)
|
||||||
|
if not validation_result.is_valid:
|
||||||
|
logger.warning(
|
||||||
|
f"Skipping invalid scan result: {file_path}"
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
result = validation_result.entry
|
||||||
|
|
||||||
self._ensure_license_flags(result)
|
self._ensure_license_flags(result)
|
||||||
raw_data.append(result)
|
raw_data.append(result)
|
||||||
|
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Tuple
|
|||||||
from ..config import config
|
from ..config import config
|
||||||
from .recipe_cache import RecipeCache
|
from .recipe_cache import RecipeCache
|
||||||
from .recipe_fts_index import RecipeFTSIndex
|
from .recipe_fts_index import RecipeFTSIndex
|
||||||
from .persistent_recipe_cache import PersistentRecipeCache, get_persistent_recipe_cache
|
from .persistent_recipe_cache import PersistentRecipeCache, get_persistent_recipe_cache, PersistedRecipeData
|
||||||
from .service_registry import ServiceRegistry
|
from .service_registry import ServiceRegistry
|
||||||
from .lora_scanner import LoraScanner
|
from .lora_scanner import LoraScanner
|
||||||
from .metadata_service import get_default_metadata_provider
|
from .metadata_service import get_default_metadata_provider
|
||||||
@@ -431,6 +431,16 @@ class RecipeScanner:
|
|||||||
4. Persist results for next startup
|
4. Persist results for next startup
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
|
# Ensure cache exists to avoid None reference errors
|
||||||
|
if self._cache is None:
|
||||||
|
self._cache = RecipeCache(
|
||||||
|
raw_data=[],
|
||||||
|
sorted_by_name=[],
|
||||||
|
sorted_by_date=[],
|
||||||
|
folders=[],
|
||||||
|
folder_tree={},
|
||||||
|
)
|
||||||
|
|
||||||
# Create a new event loop for this thread
|
# Create a new event loop for this thread
|
||||||
loop = asyncio.new_event_loop()
|
loop = asyncio.new_event_loop()
|
||||||
asyncio.set_event_loop(loop)
|
asyncio.set_event_loop(loop)
|
||||||
@@ -492,7 +502,7 @@ class RecipeScanner:
|
|||||||
|
|
||||||
def _reconcile_recipe_cache(
|
def _reconcile_recipe_cache(
|
||||||
self,
|
self,
|
||||||
persisted: "PersistedRecipeData",
|
persisted: PersistedRecipeData,
|
||||||
recipes_dir: str,
|
recipes_dir: str,
|
||||||
) -> Tuple[List[Dict], bool, Dict[str, str]]:
|
) -> Tuple[List[Dict], bool, Dict[str, str]]:
|
||||||
"""Reconcile persisted cache with current filesystem state.
|
"""Reconcile persisted cache with current filesystem state.
|
||||||
@@ -504,8 +514,6 @@ class RecipeScanner:
|
|||||||
Returns:
|
Returns:
|
||||||
Tuple of (recipes list, changed flag, json_paths dict).
|
Tuple of (recipes list, changed flag, json_paths dict).
|
||||||
"""
|
"""
|
||||||
from .persistent_recipe_cache import PersistedRecipeData
|
|
||||||
|
|
||||||
recipes: List[Dict] = []
|
recipes: List[Dict] = []
|
||||||
json_paths: Dict[str, str] = {}
|
json_paths: Dict[str, str] = {}
|
||||||
changed = False
|
changed = False
|
||||||
@@ -522,32 +530,37 @@ class RecipeScanner:
|
|||||||
except OSError:
|
except OSError:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# Build lookup of persisted recipes by json_path
|
# Build recipe_id -> recipe lookup (O(n) instead of O(n²))
|
||||||
persisted_by_path: Dict[str, Dict] = {}
|
recipe_by_id: Dict[str, Dict] = {
|
||||||
for recipe in persisted.raw_data:
|
|
||||||
recipe_id = str(recipe.get('id', ''))
|
|
||||||
if recipe_id:
|
|
||||||
# Find the json_path from file_stats
|
|
||||||
for json_path, (mtime, size) in persisted.file_stats.items():
|
|
||||||
if os.path.basename(json_path).startswith(recipe_id):
|
|
||||||
persisted_by_path[json_path] = recipe
|
|
||||||
break
|
|
||||||
|
|
||||||
# Also index by recipe ID for faster lookups
|
|
||||||
persisted_by_id: Dict[str, Dict] = {
|
|
||||||
str(r.get('id', '')): r for r in persisted.raw_data if r.get('id')
|
str(r.get('id', '')): r for r in persisted.raw_data if r.get('id')
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# Build json_path -> recipe lookup from file_stats (O(m))
|
||||||
|
persisted_by_path: Dict[str, Dict] = {}
|
||||||
|
for json_path in persisted.file_stats.keys():
|
||||||
|
basename = os.path.basename(json_path)
|
||||||
|
if basename.lower().endswith('.recipe.json'):
|
||||||
|
recipe_id = basename[:-len('.recipe.json')]
|
||||||
|
if recipe_id in recipe_by_id:
|
||||||
|
persisted_by_path[json_path] = recipe_by_id[recipe_id]
|
||||||
|
|
||||||
# Process current files
|
# Process current files
|
||||||
for file_path, (current_mtime, current_size) in current_files.items():
|
for file_path, (current_mtime, current_size) in current_files.items():
|
||||||
cached_stats = persisted.file_stats.get(file_path)
|
cached_stats = persisted.file_stats.get(file_path)
|
||||||
|
|
||||||
|
# Extract recipe_id from current file for fallback lookup
|
||||||
|
basename = os.path.basename(file_path)
|
||||||
|
recipe_id_from_file = basename[:-len('.recipe.json')] if basename.lower().endswith('.recipe.json') else None
|
||||||
|
|
||||||
if cached_stats:
|
if cached_stats:
|
||||||
cached_mtime, cached_size = cached_stats
|
cached_mtime, cached_size = cached_stats
|
||||||
# Check if file is unchanged
|
# Check if file is unchanged
|
||||||
if abs(current_mtime - cached_mtime) < 1.0 and current_size == cached_size:
|
if abs(current_mtime - cached_mtime) < 1.0 and current_size == cached_size:
|
||||||
# Use cached data
|
# Try direct path lookup first
|
||||||
cached_recipe = persisted_by_path.get(file_path)
|
cached_recipe = persisted_by_path.get(file_path)
|
||||||
|
# Fallback to recipe_id lookup if path lookup fails
|
||||||
|
if not cached_recipe and recipe_id_from_file:
|
||||||
|
cached_recipe = recipe_by_id.get(recipe_id_from_file)
|
||||||
if cached_recipe:
|
if cached_recipe:
|
||||||
recipe_id = str(cached_recipe.get('id', ''))
|
recipe_id = str(cached_recipe.get('id', ''))
|
||||||
# Track folder from file path
|
# Track folder from file path
|
||||||
@@ -2218,3 +2231,26 @@ class RecipeScanner:
|
|||||||
duplicate_groups = {k: v for k, v in fingerprint_groups.items() if len(v) > 1}
|
duplicate_groups = {k: v for k, v in fingerprint_groups.items() if len(v) > 1}
|
||||||
|
|
||||||
return duplicate_groups
|
return duplicate_groups
|
||||||
|
|
||||||
|
async def find_duplicate_recipes_by_source(self) -> dict:
|
||||||
|
"""Find all recipe duplicates based on source_path (Civitai image URLs)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dictionary where keys are source URLs and values are lists of recipe IDs
|
||||||
|
"""
|
||||||
|
cache = await self.get_cached_data()
|
||||||
|
|
||||||
|
url_groups = {}
|
||||||
|
for recipe in cache.raw_data:
|
||||||
|
source_url = recipe.get('source_path', '').strip()
|
||||||
|
if not source_url:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if source_url not in url_groups:
|
||||||
|
url_groups[source_url] = []
|
||||||
|
|
||||||
|
url_groups[source_url].append(recipe.get('id'))
|
||||||
|
|
||||||
|
duplicate_groups = {k: v for k, v in url_groups.items() if len(v) > 1}
|
||||||
|
|
||||||
|
return duplicate_groups
|
||||||
|
|||||||
@@ -28,6 +28,9 @@ CORE_USER_SETTING_KEYS: Tuple[str, ...] = (
|
|||||||
"folder_paths",
|
"folder_paths",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Threshold for aggressive cleanup: if file contains this many default keys, clean it up
|
||||||
|
DEFAULT_KEYS_CLEANUP_THRESHOLD = 10
|
||||||
|
|
||||||
|
|
||||||
DEFAULT_SETTINGS: Dict[str, Any] = {
|
DEFAULT_SETTINGS: Dict[str, Any] = {
|
||||||
"civitai_api_key": "",
|
"civitai_api_key": "",
|
||||||
@@ -63,7 +66,7 @@ DEFAULT_SETTINGS: Dict[str, Any] = {
|
|||||||
"compact_mode": False,
|
"compact_mode": False,
|
||||||
"priority_tags": DEFAULT_PRIORITY_TAG_CONFIG.copy(),
|
"priority_tags": DEFAULT_PRIORITY_TAG_CONFIG.copy(),
|
||||||
"model_name_display": "model_name",
|
"model_name_display": "model_name",
|
||||||
"model_card_footer_action": "example_images",
|
"model_card_footer_action": "replace_preview",
|
||||||
"update_flag_strategy": "same_base",
|
"update_flag_strategy": "same_base",
|
||||||
"auto_organize_exclusions": [],
|
"auto_organize_exclusions": [],
|
||||||
}
|
}
|
||||||
@@ -95,6 +98,9 @@ class SettingsManager:
|
|||||||
if self._needs_initial_save:
|
if self._needs_initial_save:
|
||||||
self._save_settings()
|
self._save_settings()
|
||||||
self._needs_initial_save = False
|
self._needs_initial_save = False
|
||||||
|
else:
|
||||||
|
# Clean up existing settings file by removing default values
|
||||||
|
self._cleanup_default_values_from_disk()
|
||||||
|
|
||||||
def _detect_standalone_mode(self) -> bool:
|
def _detect_standalone_mode(self) -> bool:
|
||||||
"""Return ``True`` when running in standalone mode."""
|
"""Return ``True`` when running in standalone mode."""
|
||||||
@@ -226,7 +232,7 @@ class SettingsManager:
|
|||||||
return merged
|
return merged
|
||||||
|
|
||||||
def _ensure_default_settings(self) -> None:
|
def _ensure_default_settings(self) -> None:
|
||||||
"""Ensure all default settings keys exist"""
|
"""Ensure all default settings keys exist in memory (but don't save defaults to disk)"""
|
||||||
defaults = self._get_default_settings()
|
defaults = self._get_default_settings()
|
||||||
updated_existing = False
|
updated_existing = False
|
||||||
inserted_defaults = False
|
inserted_defaults = False
|
||||||
@@ -265,10 +271,10 @@ class SettingsManager:
|
|||||||
self.settings[key] = value
|
self.settings[key] = value
|
||||||
inserted_defaults = True
|
inserted_defaults = True
|
||||||
|
|
||||||
if updated_existing or (
|
# Save only if existing values were normalized/updated
|
||||||
inserted_defaults and self._bootstrap_reason in {"invalid", "unreadable"}
|
if updated_existing:
|
||||||
):
|
|
||||||
self._save_settings()
|
self._save_settings()
|
||||||
|
# Note: inserted_defaults no longer triggers save - defaults stay in memory only
|
||||||
|
|
||||||
def _migrate_to_library_registry(self) -> None:
|
def _migrate_to_library_registry(self) -> None:
|
||||||
"""Ensure settings include the multi-library registry structure."""
|
"""Ensure settings include the multi-library registry structure."""
|
||||||
@@ -711,6 +717,42 @@ class SettingsManager:
|
|||||||
|
|
||||||
self._startup_messages.append(payload)
|
self._startup_messages.append(payload)
|
||||||
|
|
||||||
|
def _cleanup_default_values_from_disk(self) -> None:
|
||||||
|
"""Remove default values from existing settings.json to keep it clean.
|
||||||
|
|
||||||
|
Only performs cleanup if the file contains a significant number of default
|
||||||
|
values (indicating it's "bloated"). Small files (like template-based configs)
|
||||||
|
are preserved as-is to avoid unexpected changes.
|
||||||
|
"""
|
||||||
|
# Only cleanup existing files (not new ones)
|
||||||
|
if self._bootstrap_reason == "missing" or self._original_disk_payload is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
defaults = self._get_default_settings()
|
||||||
|
disk_keys = set(self._original_disk_payload.keys())
|
||||||
|
|
||||||
|
# Count how many keys on disk are set to their default values
|
||||||
|
default_value_keys = set()
|
||||||
|
for key in disk_keys:
|
||||||
|
if key in CORE_USER_SETTING_KEYS:
|
||||||
|
continue # Core keys don't count as "cleanup candidates"
|
||||||
|
disk_value = self._original_disk_payload.get(key)
|
||||||
|
default_value = defaults.get(key)
|
||||||
|
# Compare using JSON serialization for complex objects
|
||||||
|
if json.dumps(disk_value, sort_keys=True, default=str) == json.dumps(default_value, sort_keys=True, default=str):
|
||||||
|
default_value_keys.add(key)
|
||||||
|
|
||||||
|
# Only cleanup if there are "many" default keys (indicating a bloated file)
|
||||||
|
# This preserves small/template-based configs while cleaning up legacy bloated files
|
||||||
|
if len(default_value_keys) >= DEFAULT_KEYS_CLEANUP_THRESHOLD:
|
||||||
|
logger.info(
|
||||||
|
"Cleaning up %d default value(s) from settings.json to keep it minimal",
|
||||||
|
len(default_value_keys)
|
||||||
|
)
|
||||||
|
self._save_settings()
|
||||||
|
# Update original payload to match what we just saved
|
||||||
|
self._original_disk_payload = self._serialize_settings_for_disk()
|
||||||
|
|
||||||
def _collect_configuration_warnings(self) -> None:
|
def _collect_configuration_warnings(self) -> None:
|
||||||
if not self._standalone_mode:
|
if not self._standalone_mode:
|
||||||
return
|
return
|
||||||
@@ -1101,7 +1143,12 @@ class SettingsManager:
|
|||||||
self._seed_template = None
|
self._seed_template = None
|
||||||
|
|
||||||
def _serialize_settings_for_disk(self) -> Dict[str, Any]:
|
def _serialize_settings_for_disk(self) -> Dict[str, Any]:
|
||||||
"""Return the settings payload that should be persisted to disk."""
|
"""Return the settings payload that should be persisted to disk.
|
||||||
|
|
||||||
|
Only saves settings that differ from defaults, keeping the config file
|
||||||
|
clean and focused on user customizations. Default values are still
|
||||||
|
available at runtime via _get_default_settings().
|
||||||
|
"""
|
||||||
|
|
||||||
if self._bootstrap_reason == "missing":
|
if self._bootstrap_reason == "missing":
|
||||||
minimal: Dict[str, Any] = {}
|
minimal: Dict[str, Any] = {}
|
||||||
@@ -1115,7 +1162,25 @@ class SettingsManager:
|
|||||||
|
|
||||||
return minimal
|
return minimal
|
||||||
|
|
||||||
return copy.deepcopy(self.settings)
|
# Only save settings that differ from defaults
|
||||||
|
defaults = self._get_default_settings()
|
||||||
|
minimal = {}
|
||||||
|
|
||||||
|
for key, value in self.settings.items():
|
||||||
|
default_value = defaults.get(key)
|
||||||
|
|
||||||
|
# Core settings are always saved (even if equal to default)
|
||||||
|
if key in CORE_USER_SETTING_KEYS:
|
||||||
|
minimal[key] = copy.deepcopy(value)
|
||||||
|
# Complex objects need deep comparison
|
||||||
|
elif isinstance(value, (dict, list)) and default_value is not None:
|
||||||
|
if json.dumps(value, sort_keys=True, default=str) != json.dumps(default_value, sort_keys=True, default=str):
|
||||||
|
minimal[key] = copy.deepcopy(value)
|
||||||
|
# Simple values use direct comparison
|
||||||
|
elif value != default_value:
|
||||||
|
minimal[key] = copy.deepcopy(value)
|
||||||
|
|
||||||
|
return minimal
|
||||||
|
|
||||||
def get_libraries(self) -> Dict[str, Dict[str, Any]]:
|
def get_libraries(self) -> Dict[str, Dict[str, Any]]:
|
||||||
"""Return a copy of the registered libraries."""
|
"""Return a copy of the registered libraries."""
|
||||||
|
|||||||
@@ -48,9 +48,14 @@ class BulkMetadataRefreshUseCase:
|
|||||||
for model in cache.raw_data
|
for model in cache.raw_data
|
||||||
if model.get("sha256")
|
if model.get("sha256")
|
||||||
and (not model.get("civitai") or not model["civitai"].get("id"))
|
and (not model.get("civitai") or not model["civitai"].get("id"))
|
||||||
and (
|
and not (
|
||||||
(enable_metadata_archive_db and not model.get("db_checked", False))
|
# Skip models confirmed not on CivitAI when no need to retry
|
||||||
or (not enable_metadata_archive_db and model.get("from_civitai") is True)
|
model.get("from_civitai") is False
|
||||||
|
and model.get("civitai_deleted") is True
|
||||||
|
and (
|
||||||
|
not enable_metadata_archive_db
|
||||||
|
or model.get("db_checked", False)
|
||||||
|
)
|
||||||
)
|
)
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|||||||
@@ -255,6 +255,42 @@ class WebSocketManager:
|
|||||||
self._download_progress.pop(download_id, None)
|
self._download_progress.pop(download_id, None)
|
||||||
logger.debug(f"Cleaned up old download progress for {download_id}")
|
logger.debug(f"Cleaned up old download progress for {download_id}")
|
||||||
|
|
||||||
|
async def broadcast_cache_health_warning(self, report: 'HealthReport', page_type: str = None):
|
||||||
|
"""
|
||||||
|
Broadcast cache health warning to frontend.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
report: HealthReport instance from CacheHealthMonitor
|
||||||
|
page_type: The page type (loras, checkpoints, embeddings)
|
||||||
|
"""
|
||||||
|
from .cache_health_monitor import CacheHealthStatus
|
||||||
|
|
||||||
|
# Only broadcast if there are issues
|
||||||
|
if report.status == CacheHealthStatus.HEALTHY:
|
||||||
|
return
|
||||||
|
|
||||||
|
payload = {
|
||||||
|
'type': 'cache_health_warning',
|
||||||
|
'status': report.status.value,
|
||||||
|
'message': report.message,
|
||||||
|
'pageType': page_type,
|
||||||
|
'details': {
|
||||||
|
'total': report.total_entries,
|
||||||
|
'valid': report.valid_entries,
|
||||||
|
'invalid': report.invalid_entries,
|
||||||
|
'repaired': report.repaired_entries,
|
||||||
|
'corruption_rate': f"{report.corruption_rate:.1%}",
|
||||||
|
'invalid_paths': report.invalid_paths[:5], # Limit to first 5
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
f"Broadcasting cache health warning: {report.status.value} "
|
||||||
|
f"({report.invalid_entries} invalid entries)"
|
||||||
|
)
|
||||||
|
|
||||||
|
await self.broadcast(payload)
|
||||||
|
|
||||||
def get_connected_clients_count(self) -> int:
|
def get_connected_clients_count(self) -> int:
|
||||||
"""Get number of connected clients"""
|
"""Get number of connected clients"""
|
||||||
return len(self._websockets)
|
return len(self._websockets)
|
||||||
|
|||||||
@@ -216,6 +216,11 @@ class DownloadManager:
|
|||||||
self._progress["failed_models"] = set()
|
self._progress["failed_models"] = set()
|
||||||
|
|
||||||
self._is_downloading = True
|
self._is_downloading = True
|
||||||
|
snapshot = self._progress.snapshot()
|
||||||
|
|
||||||
|
# Create the download task without awaiting it
|
||||||
|
# This ensures the HTTP response is returned immediately
|
||||||
|
# while the actual processing happens in the background
|
||||||
self._download_task = asyncio.create_task(
|
self._download_task = asyncio.create_task(
|
||||||
self._download_all_example_images(
|
self._download_all_example_images(
|
||||||
output_dir,
|
output_dir,
|
||||||
@@ -227,7 +232,10 @@ class DownloadManager:
|
|||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
snapshot = self._progress.snapshot()
|
# Add a callback to handle task completion/errors
|
||||||
|
self._download_task.add_done_callback(
|
||||||
|
lambda t: self._handle_download_task_done(t, output_dir)
|
||||||
|
)
|
||||||
except ExampleImagesDownloadError:
|
except ExampleImagesDownloadError:
|
||||||
# Re-raise our own exception types without wrapping
|
# Re-raise our own exception types without wrapping
|
||||||
self._is_downloading = False
|
self._is_downloading = False
|
||||||
@@ -241,10 +249,25 @@ class DownloadManager:
|
|||||||
)
|
)
|
||||||
raise ExampleImagesDownloadError(str(e)) from e
|
raise ExampleImagesDownloadError(str(e)) from e
|
||||||
|
|
||||||
await self._broadcast_progress(status="running")
|
# Broadcast progress in the background without blocking the response
|
||||||
|
# This ensures the HTTP response is returned immediately
|
||||||
|
asyncio.create_task(self._broadcast_progress(status="running"))
|
||||||
|
|
||||||
return {"success": True, "message": "Download started", "status": snapshot}
|
return {"success": True, "message": "Download started", "status": snapshot}
|
||||||
|
|
||||||
|
def _handle_download_task_done(self, task: asyncio.Task, output_dir: str) -> None:
|
||||||
|
"""Handle download task completion, including saving progress on error."""
|
||||||
|
try:
|
||||||
|
# This will re-raise any exception from the task
|
||||||
|
task.result()
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Download task failed with error: {e}", exc_info=True)
|
||||||
|
# Ensure progress is saved even on failure
|
||||||
|
try:
|
||||||
|
self._save_progress(output_dir)
|
||||||
|
except Exception as save_error:
|
||||||
|
logger.error(f"Failed to save progress after task failure: {save_error}")
|
||||||
|
|
||||||
async def get_status(self, request):
|
async def get_status(self, request):
|
||||||
"""Get the current status of example images download."""
|
"""Get the current status of example images download."""
|
||||||
|
|
||||||
@@ -254,6 +277,130 @@ class DownloadManager:
|
|||||||
"status": self._progress.snapshot(),
|
"status": self._progress.snapshot(),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async def check_pending_models(self, model_types: list[str]) -> dict:
|
||||||
|
"""Quickly check how many models need example images downloaded.
|
||||||
|
|
||||||
|
This is a lightweight check that avoids the overhead of starting
|
||||||
|
a full download task when no work is needed.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict with keys:
|
||||||
|
- total_models: Total number of models across specified types
|
||||||
|
- pending_count: Number of models needing example images
|
||||||
|
- processed_count: Number of already processed models
|
||||||
|
- failed_count: Number of models marked as failed
|
||||||
|
- needs_download: True if there are pending models to process
|
||||||
|
"""
|
||||||
|
from ..services.service_registry import ServiceRegistry
|
||||||
|
|
||||||
|
if self._is_downloading:
|
||||||
|
return {
|
||||||
|
"success": True,
|
||||||
|
"is_downloading": True,
|
||||||
|
"total_models": 0,
|
||||||
|
"pending_count": 0,
|
||||||
|
"processed_count": 0,
|
||||||
|
"failed_count": 0,
|
||||||
|
"needs_download": False,
|
||||||
|
"message": "Download already in progress",
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Get scanners
|
||||||
|
scanners = []
|
||||||
|
if "lora" in model_types:
|
||||||
|
lora_scanner = await ServiceRegistry.get_lora_scanner()
|
||||||
|
scanners.append(("lora", lora_scanner))
|
||||||
|
|
||||||
|
if "checkpoint" in model_types:
|
||||||
|
checkpoint_scanner = await ServiceRegistry.get_checkpoint_scanner()
|
||||||
|
scanners.append(("checkpoint", checkpoint_scanner))
|
||||||
|
|
||||||
|
if "embedding" in model_types:
|
||||||
|
embedding_scanner = await ServiceRegistry.get_embedding_scanner()
|
||||||
|
scanners.append(("embedding", embedding_scanner))
|
||||||
|
|
||||||
|
# Load progress file to check processed models
|
||||||
|
settings_manager = get_settings_manager()
|
||||||
|
active_library = settings_manager.get_active_library_name()
|
||||||
|
output_dir = self._resolve_output_dir(active_library)
|
||||||
|
|
||||||
|
processed_models: set[str] = set()
|
||||||
|
failed_models: set[str] = set()
|
||||||
|
|
||||||
|
if output_dir:
|
||||||
|
progress_file = os.path.join(output_dir, ".download_progress.json")
|
||||||
|
if os.path.exists(progress_file):
|
||||||
|
try:
|
||||||
|
with open(progress_file, "r", encoding="utf-8") as f:
|
||||||
|
saved_progress = json.load(f)
|
||||||
|
processed_models = set(saved_progress.get("processed_models", []))
|
||||||
|
failed_models = set(saved_progress.get("failed_models", []))
|
||||||
|
except Exception:
|
||||||
|
pass # Ignore progress file errors for quick check
|
||||||
|
|
||||||
|
# Count models
|
||||||
|
total_models = 0
|
||||||
|
models_with_hash = 0
|
||||||
|
|
||||||
|
for scanner_type, scanner in scanners:
|
||||||
|
cache = await scanner.get_cached_data()
|
||||||
|
if cache and cache.raw_data:
|
||||||
|
for model in cache.raw_data:
|
||||||
|
total_models += 1
|
||||||
|
if model.get("sha256"):
|
||||||
|
models_with_hash += 1
|
||||||
|
|
||||||
|
# Calculate pending count
|
||||||
|
# A model is pending if it has a hash and is not in processed_models
|
||||||
|
# We also exclude failed_models unless force mode would be used
|
||||||
|
pending_count = models_with_hash - len(processed_models.intersection(
|
||||||
|
{m.get("sha256", "").lower() for scanner_type, scanner in scanners
|
||||||
|
for m in (await scanner.get_cached_data()).raw_data if m.get("sha256")}
|
||||||
|
))
|
||||||
|
|
||||||
|
# More accurate pending count: check which models actually need processing
|
||||||
|
pending_hashes = set()
|
||||||
|
for scanner_type, scanner in scanners:
|
||||||
|
cache = await scanner.get_cached_data()
|
||||||
|
if cache and cache.raw_data:
|
||||||
|
for model in cache.raw_data:
|
||||||
|
raw_hash = model.get("sha256")
|
||||||
|
if not raw_hash:
|
||||||
|
continue
|
||||||
|
model_hash = raw_hash.lower()
|
||||||
|
if model_hash not in processed_models:
|
||||||
|
# Check if model folder exists with files
|
||||||
|
model_dir = ExampleImagePathResolver.get_model_folder(
|
||||||
|
model_hash, active_library
|
||||||
|
)
|
||||||
|
if not _model_directory_has_files(model_dir):
|
||||||
|
pending_hashes.add(model_hash)
|
||||||
|
|
||||||
|
pending_count = len(pending_hashes)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"success": True,
|
||||||
|
"is_downloading": False,
|
||||||
|
"total_models": total_models,
|
||||||
|
"pending_count": pending_count,
|
||||||
|
"processed_count": len(processed_models),
|
||||||
|
"failed_count": len(failed_models),
|
||||||
|
"needs_download": pending_count > 0,
|
||||||
|
}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error checking pending models: {e}", exc_info=True)
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"error": str(e),
|
||||||
|
"total_models": 0,
|
||||||
|
"pending_count": 0,
|
||||||
|
"processed_count": 0,
|
||||||
|
"failed_count": 0,
|
||||||
|
"needs_download": False,
|
||||||
|
}
|
||||||
|
|
||||||
async def pause_download(self, request):
|
async def pause_download(self, request):
|
||||||
"""Pause the example images download."""
|
"""Pause the example images download."""
|
||||||
|
|
||||||
|
|||||||
@@ -43,8 +43,15 @@ class ExampleImagesProcessor:
|
|||||||
return media_url
|
return media_url
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _get_file_extension_from_content_or_headers(content, headers, fallback_url=None):
|
def _get_file_extension_from_content_or_headers(content, headers, fallback_url=None, media_type_hint=None):
|
||||||
"""Determine file extension from content magic bytes or headers"""
|
"""Determine file extension from content magic bytes or headers
|
||||||
|
|
||||||
|
Args:
|
||||||
|
content: File content bytes
|
||||||
|
headers: HTTP response headers
|
||||||
|
fallback_url: Original URL for extension extraction
|
||||||
|
media_type_hint: Optional media type hint from metadata (e.g., "video" or "image")
|
||||||
|
"""
|
||||||
# Check magic bytes for common formats
|
# Check magic bytes for common formats
|
||||||
if content:
|
if content:
|
||||||
if content.startswith(b'\xFF\xD8\xFF'):
|
if content.startswith(b'\xFF\xD8\xFF'):
|
||||||
@@ -82,6 +89,10 @@ class ExampleImagesProcessor:
|
|||||||
if ext in SUPPORTED_MEDIA_EXTENSIONS['images'] or ext in SUPPORTED_MEDIA_EXTENSIONS['videos']:
|
if ext in SUPPORTED_MEDIA_EXTENSIONS['images'] or ext in SUPPORTED_MEDIA_EXTENSIONS['videos']:
|
||||||
return ext
|
return ext
|
||||||
|
|
||||||
|
# Use media type hint from metadata if available
|
||||||
|
if media_type_hint == "video":
|
||||||
|
return '.mp4'
|
||||||
|
|
||||||
# Default fallback
|
# Default fallback
|
||||||
return '.jpg'
|
return '.jpg'
|
||||||
|
|
||||||
@@ -136,7 +147,7 @@ class ExampleImagesProcessor:
|
|||||||
if success:
|
if success:
|
||||||
# Determine file extension from content or headers
|
# Determine file extension from content or headers
|
||||||
media_ext = ExampleImagesProcessor._get_file_extension_from_content_or_headers(
|
media_ext = ExampleImagesProcessor._get_file_extension_from_content_or_headers(
|
||||||
content, headers, original_url
|
content, headers, original_url, image.get("type")
|
||||||
)
|
)
|
||||||
|
|
||||||
# Check if the detected file type is supported
|
# Check if the detected file type is supported
|
||||||
@@ -219,7 +230,7 @@ class ExampleImagesProcessor:
|
|||||||
if success:
|
if success:
|
||||||
# Determine file extension from content or headers
|
# Determine file extension from content or headers
|
||||||
media_ext = ExampleImagesProcessor._get_file_extension_from_content_or_headers(
|
media_ext = ExampleImagesProcessor._get_file_extension_from_content_or_headers(
|
||||||
content, headers, original_url
|
content, headers, original_url, image.get("type")
|
||||||
)
|
)
|
||||||
|
|
||||||
# Check if the detected file type is supported
|
# Check if the detected file type is supported
|
||||||
|
|||||||
@@ -17,7 +17,7 @@ async def extract_lora_metadata(file_path: str) -> Dict:
|
|||||||
base_model = determine_base_model(metadata.get("ss_base_model_version"))
|
base_model = determine_base_model(metadata.get("ss_base_model_version"))
|
||||||
return {"base_model": base_model}
|
return {"base_model": base_model}
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(f"Error reading metadata from {file_path}: {str(e)}")
|
logger.error(f"Error reading metadata from {file_path}: {str(e)}")
|
||||||
return {"base_model": "Unknown"}
|
return {"base_model": "Unknown"}
|
||||||
|
|
||||||
async def extract_checkpoint_metadata(file_path: str) -> dict:
|
async def extract_checkpoint_metadata(file_path: str) -> dict:
|
||||||
|
|||||||
@@ -223,7 +223,7 @@ class MetadataManager:
|
|||||||
preview_url=normalize_path(preview_url),
|
preview_url=normalize_path(preview_url),
|
||||||
tags=[],
|
tags=[],
|
||||||
modelDescription="",
|
modelDescription="",
|
||||||
model_type="checkpoint",
|
sub_type="checkpoint",
|
||||||
from_civitai=True
|
from_civitai=True
|
||||||
)
|
)
|
||||||
elif model_class.__name__ == "EmbeddingMetadata":
|
elif model_class.__name__ == "EmbeddingMetadata":
|
||||||
@@ -238,6 +238,7 @@ class MetadataManager:
|
|||||||
preview_url=normalize_path(preview_url),
|
preview_url=normalize_path(preview_url),
|
||||||
tags=[],
|
tags=[],
|
||||||
modelDescription="",
|
modelDescription="",
|
||||||
|
sub_type="embedding",
|
||||||
from_civitai=True
|
from_civitai=True
|
||||||
)
|
)
|
||||||
else: # Default to LoraMetadata
|
else: # Default to LoraMetadata
|
||||||
|
|||||||
@@ -138,19 +138,15 @@ def calculate_recipe_fingerprint(loras):
|
|||||||
if not loras:
|
if not loras:
|
||||||
return ""
|
return ""
|
||||||
|
|
||||||
# Filter valid entries and extract hash and strength
|
|
||||||
valid_loras = []
|
valid_loras = []
|
||||||
for lora in loras:
|
for lora in loras:
|
||||||
# Skip excluded loras
|
|
||||||
if lora.get("exclude", False):
|
if lora.get("exclude", False):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# Get the hash - use modelVersionId as fallback if hash is empty
|
|
||||||
hash_value = lora.get("hash", "").lower()
|
hash_value = lora.get("hash", "").lower()
|
||||||
if not hash_value and lora.get("isDeleted", False) and lora.get("modelVersionId"):
|
if not hash_value and lora.get("modelVersionId"):
|
||||||
hash_value = str(lora.get("modelVersionId"))
|
hash_value = str(lora.get("modelVersionId"))
|
||||||
|
|
||||||
# Skip entries without a valid hash
|
|
||||||
if not hash_value:
|
if not hash_value:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
[project]
|
[project]
|
||||||
name = "comfyui-lora-manager"
|
name = "comfyui-lora-manager"
|
||||||
description = "Revolutionize your workflow with the ultimate LoRA companion for ComfyUI!"
|
description = "Revolutionize your workflow with the ultimate LoRA companion for ComfyUI!"
|
||||||
version = "0.9.13"
|
version = "0.9.15"
|
||||||
license = {file = "LICENSE"}
|
license = {file = "LICENSE"}
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"aiohttp",
|
"aiohttp",
|
||||||
|
|||||||
0
scripts/sync_translation_keys.py
Normal file → Executable file
0
scripts/sync_translation_keys.py
Normal file → Executable file
@@ -65,6 +65,8 @@ body {
|
|||||||
--space-1: calc(8px * 1);
|
--space-1: calc(8px * 1);
|
||||||
--space-2: calc(8px * 2);
|
--space-2: calc(8px * 2);
|
||||||
--space-3: calc(8px * 3);
|
--space-3: calc(8px * 3);
|
||||||
|
--space-4: calc(8px * 4);
|
||||||
|
--space-5: calc(8px * 5);
|
||||||
|
|
||||||
/* Z-index Scale */
|
/* Z-index Scale */
|
||||||
--z-base: 10;
|
--z-base: 10;
|
||||||
|
|||||||
@@ -113,6 +113,12 @@
|
|||||||
max-width: 110px;
|
max-width: 110px;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/* Compact mode: hide sub-type to save space */
|
||||||
|
.compact-density .model-sub-type,
|
||||||
|
.compact-density .model-separator {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
.compact-density .card-actions i {
|
.compact-density .card-actions i {
|
||||||
font-size: 0.95em;
|
font-size: 0.95em;
|
||||||
padding: 3px;
|
padding: 3px;
|
||||||
|
|||||||
354
static/css/components/model-modal/metadata.css
Normal file
354
static/css/components/model-modal/metadata.css
Normal file
@@ -0,0 +1,354 @@
|
|||||||
|
/* Metadata Panel - Right Panel */
|
||||||
|
|
||||||
|
.metadata {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
height: 100%;
|
||||||
|
background: var(--card-bg);
|
||||||
|
border-left: 1px solid var(--lora-border);
|
||||||
|
overflow: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Header section */
|
||||||
|
.metadata__header {
|
||||||
|
padding: var(--space-3);
|
||||||
|
border-bottom: 1px solid var(--lora-border);
|
||||||
|
background: var(--lora-surface);
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__title-row {
|
||||||
|
display: flex;
|
||||||
|
align-items: flex-start;
|
||||||
|
justify-content: space-between;
|
||||||
|
gap: var(--space-2);
|
||||||
|
margin-bottom: var(--space-2);
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__name {
|
||||||
|
font-size: 1.4em;
|
||||||
|
font-weight: 600;
|
||||||
|
line-height: 1.3;
|
||||||
|
color: var(--text-color);
|
||||||
|
margin: 0;
|
||||||
|
word-break: break-word;
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__edit-btn {
|
||||||
|
flex-shrink: 0;
|
||||||
|
width: 32px;
|
||||||
|
height: 32px;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
background: transparent;
|
||||||
|
border: none;
|
||||||
|
border-radius: 50%;
|
||||||
|
color: var(--text-color);
|
||||||
|
opacity: 0;
|
||||||
|
cursor: pointer;
|
||||||
|
transition: opacity 0.2s, background-color 0.2s;
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__header:hover .metadata__edit-btn {
|
||||||
|
opacity: 0.5;
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__edit-btn:hover {
|
||||||
|
opacity: 1 !important;
|
||||||
|
background: var(--lora-border);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Creator and actions */
|
||||||
|
.metadata__actions {
|
||||||
|
display: flex;
|
||||||
|
flex-wrap: wrap;
|
||||||
|
align-items: center;
|
||||||
|
gap: var(--space-2);
|
||||||
|
margin-bottom: var(--space-2);
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__creator {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: var(--space-1);
|
||||||
|
padding: var(--space-1) var(--space-2);
|
||||||
|
background: rgba(0, 0, 0, 0.03);
|
||||||
|
border: 1px solid var(--lora-border);
|
||||||
|
border-radius: var(--border-radius-sm);
|
||||||
|
cursor: pointer;
|
||||||
|
transition: all 0.2s;
|
||||||
|
}
|
||||||
|
|
||||||
|
[data-theme="dark"] .metadata__creator {
|
||||||
|
background: rgba(255, 255, 255, 0.03);
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__creator:hover {
|
||||||
|
border-color: var(--lora-accent);
|
||||||
|
background: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__creator-avatar {
|
||||||
|
width: 24px;
|
||||||
|
height: 24px;
|
||||||
|
border-radius: 50%;
|
||||||
|
overflow: hidden;
|
||||||
|
background: var(--lora-accent);
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__creator-avatar img {
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
object-fit: cover;
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__creator-avatar i {
|
||||||
|
color: white;
|
||||||
|
font-size: 0.8rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__creator-name {
|
||||||
|
font-size: 0.9em;
|
||||||
|
font-weight: 500;
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__civitai-link {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 6px;
|
||||||
|
padding: 6px 12px;
|
||||||
|
background: rgba(0, 0, 0, 0.03);
|
||||||
|
border: 1px solid var(--lora-border);
|
||||||
|
border-radius: var(--border-radius-sm);
|
||||||
|
color: var(--text-color);
|
||||||
|
font-size: 0.85em;
|
||||||
|
text-decoration: none;
|
||||||
|
transition: all 0.2s;
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__civitai-link:hover {
|
||||||
|
border-color: var(--lora-accent);
|
||||||
|
background: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.1);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* License icons */
|
||||||
|
.metadata__licenses {
|
||||||
|
display: flex;
|
||||||
|
gap: var(--space-1);
|
||||||
|
margin-left: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__license-icon {
|
||||||
|
width: 22px;
|
||||||
|
height: 22px;
|
||||||
|
display: inline-block;
|
||||||
|
background-color: var(--text-muted);
|
||||||
|
-webkit-mask: var(--license-icon-image) center/contain no-repeat;
|
||||||
|
mask: var(--license-icon-image) center/contain no-repeat;
|
||||||
|
transition: background-color 0.2s ease, transform 0.2s ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__license-icon:hover {
|
||||||
|
background-color: var(--text-color);
|
||||||
|
transform: translateY(-1px);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Tags */
|
||||||
|
.metadata__tags {
|
||||||
|
display: flex;
|
||||||
|
flex-wrap: wrap;
|
||||||
|
gap: var(--space-1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__tag {
|
||||||
|
display: inline-flex;
|
||||||
|
align-items: center;
|
||||||
|
padding: 4px 10px;
|
||||||
|
background: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.1);
|
||||||
|
border: 1px solid oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.2);
|
||||||
|
border-radius: 999px;
|
||||||
|
font-size: 0.8em;
|
||||||
|
color: var(--lora-accent);
|
||||||
|
transition: all 0.2s;
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__tag:hover {
|
||||||
|
background: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.2);
|
||||||
|
border-color: var(--lora-accent);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Info grid */
|
||||||
|
.metadata__info {
|
||||||
|
padding: var(--space-2) var(--space-3);
|
||||||
|
border-bottom: 1px solid var(--lora-border);
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__info-grid {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: repeat(2, 1fr);
|
||||||
|
gap: var(--space-2);
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__info-item {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 2px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__info-item--full {
|
||||||
|
grid-column: 1 / -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__info-label {
|
||||||
|
font-size: 0.75em;
|
||||||
|
color: var(--text-color);
|
||||||
|
opacity: 0.7;
|
||||||
|
text-transform: uppercase;
|
||||||
|
letter-spacing: 0.05em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__info-value {
|
||||||
|
font-size: 0.9em;
|
||||||
|
color: var(--text-color);
|
||||||
|
word-break: break-word;
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__info-value--mono {
|
||||||
|
font-family: monospace;
|
||||||
|
font-size: 0.85em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__info-value--path {
|
||||||
|
cursor: pointer;
|
||||||
|
text-decoration: underline;
|
||||||
|
text-decoration-style: dotted;
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__info-value--path:hover {
|
||||||
|
opacity: 0.8;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Editable sections */
|
||||||
|
.metadata__section {
|
||||||
|
padding: var(--space-2) var(--space-3);
|
||||||
|
border-bottom: 1px solid var(--lora-border);
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__section-header {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: space-between;
|
||||||
|
margin-bottom: var(--space-1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__section-title {
|
||||||
|
font-size: 0.75em;
|
||||||
|
color: var(--text-color);
|
||||||
|
opacity: 0.7;
|
||||||
|
text-transform: uppercase;
|
||||||
|
letter-spacing: 0.05em;
|
||||||
|
font-weight: 600;
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__section-edit {
|
||||||
|
background: transparent;
|
||||||
|
border: none;
|
||||||
|
color: var(--text-color);
|
||||||
|
opacity: 0;
|
||||||
|
cursor: pointer;
|
||||||
|
padding: 4px;
|
||||||
|
border-radius: var(--border-radius-xs);
|
||||||
|
transition: opacity 0.2s, background-color 0.2s;
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__section:hover .metadata__section-edit {
|
||||||
|
opacity: 0.5;
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__section-edit:hover {
|
||||||
|
opacity: 1 !important;
|
||||||
|
background: var(--lora-border);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Usage tips / Trigger words */
|
||||||
|
.metadata__tags--editable {
|
||||||
|
display: flex;
|
||||||
|
flex-wrap: wrap;
|
||||||
|
gap: var(--space-1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__tag--editable {
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__tag--editable:hover {
|
||||||
|
background: var(--lora-error);
|
||||||
|
border-color: var(--lora-error);
|
||||||
|
color: white;
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__tag--add {
|
||||||
|
background: transparent;
|
||||||
|
border-style: dashed;
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__tag--add:hover {
|
||||||
|
background: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.1);
|
||||||
|
border-style: solid;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Notes textarea */
|
||||||
|
.metadata__notes {
|
||||||
|
min-height: 60px;
|
||||||
|
max-height: 120px;
|
||||||
|
padding: var(--space-2);
|
||||||
|
background: var(--bg-color);
|
||||||
|
border: 1px solid var(--lora-border);
|
||||||
|
border-radius: var(--border-radius-xs);
|
||||||
|
font-size: 0.9em;
|
||||||
|
line-height: 1.5;
|
||||||
|
color: var(--text-color);
|
||||||
|
resize: vertical;
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__notes:focus {
|
||||||
|
outline: none;
|
||||||
|
border-color: var(--lora-accent);
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__notes::placeholder {
|
||||||
|
color: var(--text-color);
|
||||||
|
opacity: 0.5;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Content area (tabs + scrollable content) */
|
||||||
|
.metadata__content {
|
||||||
|
flex: 1;
|
||||||
|
overflow-y: auto;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Mobile adjustments */
|
||||||
|
@media (max-width: 768px) {
|
||||||
|
.metadata__header {
|
||||||
|
padding: var(--space-2);
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__name {
|
||||||
|
font-size: 1.2em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__info {
|
||||||
|
padding: var(--space-2);
|
||||||
|
}
|
||||||
|
|
||||||
|
.metadata__section {
|
||||||
|
padding: var(--space-2);
|
||||||
|
}
|
||||||
|
}
|
||||||
167
static/css/components/model-modal/overlay.css
Normal file
167
static/css/components/model-modal/overlay.css
Normal file
@@ -0,0 +1,167 @@
|
|||||||
|
/* Model Modal Overlay - Split View Layout */
|
||||||
|
|
||||||
|
.model-overlay {
|
||||||
|
position: fixed;
|
||||||
|
top: var(--header-height, 48px);
|
||||||
|
left: var(--sidebar-width, 250px);
|
||||||
|
right: 0;
|
||||||
|
bottom: 0;
|
||||||
|
z-index: var(--z-modal, 1000);
|
||||||
|
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: 1.2fr 0.8fr;
|
||||||
|
gap: 0;
|
||||||
|
|
||||||
|
background: var(--bg-color) !important;
|
||||||
|
opacity: 0;
|
||||||
|
animation: modalOverlayFadeIn 0.25s ease-out forwards;
|
||||||
|
}
|
||||||
|
|
||||||
|
.model-overlay.sidebar-collapsed {
|
||||||
|
left: var(--sidebar-collapsed-width, 60px);
|
||||||
|
grid-template-columns: 1.3fr 0.7fr;
|
||||||
|
}
|
||||||
|
|
||||||
|
@keyframes modalOverlayFadeIn {
|
||||||
|
from {
|
||||||
|
opacity: 0;
|
||||||
|
}
|
||||||
|
to {
|
||||||
|
opacity: 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.model-overlay.closing {
|
||||||
|
opacity: 1 !important;
|
||||||
|
animation: modalOverlayFadeOut 0.2s ease-out forwards;
|
||||||
|
}
|
||||||
|
|
||||||
|
@keyframes modalOverlayFadeOut {
|
||||||
|
from {
|
||||||
|
opacity: 1;
|
||||||
|
}
|
||||||
|
to {
|
||||||
|
opacity: 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Close button */
|
||||||
|
.model-overlay__close {
|
||||||
|
position: absolute;
|
||||||
|
top: var(--space-2);
|
||||||
|
right: var(--space-2);
|
||||||
|
width: 40px;
|
||||||
|
height: 40px;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
background: rgba(0, 0, 0, 0.3);
|
||||||
|
border: none;
|
||||||
|
border-radius: 50%;
|
||||||
|
color: white;
|
||||||
|
font-size: 1.5rem;
|
||||||
|
cursor: pointer;
|
||||||
|
z-index: 10;
|
||||||
|
transition: background-color 0.2s ease, transform 0.2s ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
.model-overlay__close:hover {
|
||||||
|
background: rgba(0, 0, 0, 0.5);
|
||||||
|
transform: scale(1.05);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Keyboard shortcut hint */
|
||||||
|
.model-overlay__hint {
|
||||||
|
position: absolute;
|
||||||
|
top: var(--space-2);
|
||||||
|
left: 50%;
|
||||||
|
transform: translateX(-50%);
|
||||||
|
padding: var(--space-1) var(--space-3);
|
||||||
|
background: rgba(0, 0, 0, 0.7);
|
||||||
|
color: white;
|
||||||
|
font-size: 0.85em;
|
||||||
|
border-radius: var(--border-radius-sm);
|
||||||
|
opacity: 0;
|
||||||
|
animation: hintFadeIn 0.3s ease-out 0.5s forwards, hintFadeOut 0.3s ease-out 3.5s forwards;
|
||||||
|
z-index: 10;
|
||||||
|
pointer-events: none;
|
||||||
|
white-space: nowrap;
|
||||||
|
}
|
||||||
|
|
||||||
|
@keyframes hintFadeIn {
|
||||||
|
from {
|
||||||
|
opacity: 0;
|
||||||
|
transform: translateX(-50%) translateY(-10px);
|
||||||
|
}
|
||||||
|
to {
|
||||||
|
opacity: 1;
|
||||||
|
transform: translateX(-50%) translateY(0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@keyframes hintFadeOut {
|
||||||
|
from {
|
||||||
|
opacity: 1;
|
||||||
|
}
|
||||||
|
to {
|
||||||
|
opacity: 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.model-overlay__hint.hidden {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Responsive breakpoints */
|
||||||
|
@media (max-width: 1400px) {
|
||||||
|
.model-overlay {
|
||||||
|
grid-template-columns: 1fr 1fr;
|
||||||
|
}
|
||||||
|
|
||||||
|
.model-overlay.sidebar-collapsed {
|
||||||
|
grid-template-columns: 1.1fr 0.9fr;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@media (max-width: 1200px) {
|
||||||
|
.model-overlay {
|
||||||
|
grid-template-columns: 1fr 1fr;
|
||||||
|
}
|
||||||
|
|
||||||
|
.model-overlay.sidebar-collapsed {
|
||||||
|
grid-template-columns: 1fr 1fr;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Mobile: stack layout */
|
||||||
|
@media (max-width: 768px) {
|
||||||
|
.model-overlay {
|
||||||
|
left: 0;
|
||||||
|
grid-template-columns: 1fr;
|
||||||
|
grid-template-rows: auto 1fr;
|
||||||
|
overflow-y: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.model-overlay.sidebar-collapsed {
|
||||||
|
left: 0;
|
||||||
|
grid-template-columns: 1fr;
|
||||||
|
grid-template-rows: auto 1fr;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Body scroll lock when modal is open */
|
||||||
|
body.modal-open {
|
||||||
|
overflow: hidden !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Transition effect for content when switching models */
|
||||||
|
.showcase,
|
||||||
|
.metadata {
|
||||||
|
opacity: 1;
|
||||||
|
transition: opacity 0.15s ease-out;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase.transitioning,
|
||||||
|
.metadata.transitioning {
|
||||||
|
opacity: 0;
|
||||||
|
}
|
||||||
272
static/css/components/model-modal/recipes.css
Normal file
272
static/css/components/model-modal/recipes.css
Normal file
@@ -0,0 +1,272 @@
|
|||||||
|
/* Recipes Tab Styles */
|
||||||
|
|
||||||
|
.recipes-loading,
|
||||||
|
.recipes-error,
|
||||||
|
.recipes-empty {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
padding: var(--space-8) var(--space-4);
|
||||||
|
text-align: center;
|
||||||
|
color: var(--text-color);
|
||||||
|
opacity: 0.7;
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipes-loading i,
|
||||||
|
.recipes-error i,
|
||||||
|
.recipes-empty i {
|
||||||
|
font-size: 2rem;
|
||||||
|
margin-bottom: var(--space-3);
|
||||||
|
opacity: 0.5;
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipes-error i {
|
||||||
|
color: var(--lora-error);
|
||||||
|
opacity: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Header */
|
||||||
|
.recipes-header {
|
||||||
|
display: flex;
|
||||||
|
justify-content: space-between;
|
||||||
|
align-items: flex-start;
|
||||||
|
padding: var(--space-3);
|
||||||
|
background: var(--lora-surface);
|
||||||
|
border-bottom: 1px solid var(--lora-border);
|
||||||
|
margin: calc(-1 * var(--space-2)) calc(-1 * var(--space-2)) var(--space-2);
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipes-header__text {
|
||||||
|
flex: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipes-header__eyebrow {
|
||||||
|
display: block;
|
||||||
|
font-size: 0.75em;
|
||||||
|
text-transform: uppercase;
|
||||||
|
letter-spacing: 0.1em;
|
||||||
|
opacity: 0.6;
|
||||||
|
margin-bottom: var(--space-1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipes-header h3 {
|
||||||
|
margin: 0 0 var(--space-1);
|
||||||
|
font-size: 1.1em;
|
||||||
|
font-weight: 600;
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipes-header__description {
|
||||||
|
margin: 0;
|
||||||
|
font-size: 0.85em;
|
||||||
|
opacity: 0.7;
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipes-header__view-all {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: var(--space-1);
|
||||||
|
padding: var(--space-1) var(--space-2);
|
||||||
|
background: transparent;
|
||||||
|
border: 1px solid var(--lora-border);
|
||||||
|
border-radius: var(--border-radius-sm);
|
||||||
|
color: var(--text-color);
|
||||||
|
font-size: 0.8em;
|
||||||
|
cursor: pointer;
|
||||||
|
transition: all 0.2s;
|
||||||
|
white-space: nowrap;
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipes-header__view-all:hover {
|
||||||
|
border-color: var(--lora-accent);
|
||||||
|
background: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.1);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Recipe Cards Grid */
|
||||||
|
.recipes-grid {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: repeat(auto-fill, minmax(200px, 1fr));
|
||||||
|
gap: var(--space-3);
|
||||||
|
padding: var(--space-1);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Recipe Card */
|
||||||
|
.recipe-card {
|
||||||
|
background: var(--lora-surface);
|
||||||
|
border: 1px solid var(--lora-border);
|
||||||
|
border-radius: var(--border-radius-sm);
|
||||||
|
overflow: hidden;
|
||||||
|
cursor: pointer;
|
||||||
|
transition: all 0.2s;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipe-card:hover {
|
||||||
|
border-color: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.3);
|
||||||
|
box-shadow: 0 4px 12px rgba(0, 0, 0, 0.1);
|
||||||
|
transform: translateY(-2px);
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipe-card:focus {
|
||||||
|
outline: none;
|
||||||
|
border-color: var(--lora-accent);
|
||||||
|
box-shadow: 0 0 0 2px oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.2);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Recipe Card Media */
|
||||||
|
.recipe-card__media {
|
||||||
|
position: relative;
|
||||||
|
aspect-ratio: 16 / 10;
|
||||||
|
overflow: hidden;
|
||||||
|
background: var(--bg-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipe-card__media img {
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
object-fit: cover;
|
||||||
|
transition: transform 0.3s;
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipe-card:hover .recipe-card__media img {
|
||||||
|
transform: scale(1.05);
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipe-card__media-top {
|
||||||
|
position: absolute;
|
||||||
|
top: var(--space-2);
|
||||||
|
right: var(--space-2);
|
||||||
|
display: flex;
|
||||||
|
gap: var(--space-1);
|
||||||
|
opacity: 0;
|
||||||
|
transition: opacity 0.2s;
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipe-card:hover .recipe-card__media-top {
|
||||||
|
opacity: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipe-card__copy {
|
||||||
|
width: 32px;
|
||||||
|
height: 32px;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
background: rgba(0, 0, 0, 0.6);
|
||||||
|
border: none;
|
||||||
|
border-radius: 50%;
|
||||||
|
color: white;
|
||||||
|
cursor: pointer;
|
||||||
|
transition: all 0.2s;
|
||||||
|
backdrop-filter: blur(4px);
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipe-card__copy:hover {
|
||||||
|
background: var(--lora-accent);
|
||||||
|
transform: scale(1.1);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Recipe Card Body */
|
||||||
|
.recipe-card__body {
|
||||||
|
padding: var(--space-2);
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
flex: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipe-card__title {
|
||||||
|
margin: 0 0 var(--space-1);
|
||||||
|
font-size: 0.9em;
|
||||||
|
font-weight: 600;
|
||||||
|
line-height: 1.3;
|
||||||
|
white-space: nowrap;
|
||||||
|
overflow: hidden;
|
||||||
|
text-overflow: ellipsis;
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipe-card__meta {
|
||||||
|
display: flex;
|
||||||
|
flex-wrap: wrap;
|
||||||
|
gap: var(--space-1);
|
||||||
|
margin-bottom: var(--space-2);
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipe-card__badge {
|
||||||
|
display: inline-flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 4px;
|
||||||
|
padding: 2px 8px;
|
||||||
|
border-radius: var(--border-radius-xs);
|
||||||
|
font-size: 0.7em;
|
||||||
|
font-weight: 500;
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipe-card__badge--base {
|
||||||
|
background: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.1);
|
||||||
|
color: var(--lora-accent);
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipe-card__badge--empty {
|
||||||
|
background: var(--lora-border);
|
||||||
|
color: var(--text-color);
|
||||||
|
opacity: 0.6;
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipe-card__badge--ready {
|
||||||
|
background: oklch(60% 0.15 145);
|
||||||
|
color: white;
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipe-card__badge--missing {
|
||||||
|
background: oklch(60% 0.15 30);
|
||||||
|
color: white;
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipe-card__cta {
|
||||||
|
margin-top: auto;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: space-between;
|
||||||
|
padding-top: var(--space-2);
|
||||||
|
border-top: 1px solid var(--lora-border);
|
||||||
|
font-size: 0.8em;
|
||||||
|
font-weight: 500;
|
||||||
|
color: var(--lora-accent);
|
||||||
|
opacity: 0.8;
|
||||||
|
transition: opacity 0.2s;
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipe-card:hover .recipe-card__cta {
|
||||||
|
opacity: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipe-card__cta i {
|
||||||
|
transition: transform 0.2s;
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipe-card:hover .recipe-card__cta i {
|
||||||
|
transform: translateX(4px);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Mobile Adjustments */
|
||||||
|
@media (max-width: 768px) {
|
||||||
|
.recipes-header {
|
||||||
|
flex-direction: column;
|
||||||
|
gap: var(--space-2);
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipes-header__view-all {
|
||||||
|
width: 100%;
|
||||||
|
justify-content: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipes-grid {
|
||||||
|
grid-template-columns: repeat(auto-fill, minmax(150px, 1fr));
|
||||||
|
gap: var(--space-2);
|
||||||
|
}
|
||||||
|
|
||||||
|
.recipe-card__media-top {
|
||||||
|
opacity: 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
566
static/css/components/model-modal/showcase.css
Normal file
566
static/css/components/model-modal/showcase.css
Normal file
@@ -0,0 +1,566 @@
|
|||||||
|
/* Examples Showcase - Left Panel */
|
||||||
|
|
||||||
|
.showcase {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
height: 100%;
|
||||||
|
background: var(--lora-surface);
|
||||||
|
position: relative;
|
||||||
|
overflow: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Main image container */
|
||||||
|
.showcase__main {
|
||||||
|
flex: 1;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
padding: var(--space-3);
|
||||||
|
position: relative;
|
||||||
|
overflow: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__image-wrapper {
|
||||||
|
position: relative;
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
border-radius: var(--border-radius-sm);
|
||||||
|
background: var(--bg-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__image {
|
||||||
|
max-width: 100%;
|
||||||
|
max-height: 70vh;
|
||||||
|
object-fit: contain;
|
||||||
|
border-radius: var(--border-radius-sm);
|
||||||
|
transition: opacity 0.2s ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__image.loading {
|
||||||
|
opacity: 0.5;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Media container for images and videos */
|
||||||
|
.showcase__media-container {
|
||||||
|
position: relative;
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase-media-wrapper {
|
||||||
|
position: relative;
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__media-inner {
|
||||||
|
position: relative;
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__media {
|
||||||
|
max-width: 100%;
|
||||||
|
max-height: 70vh;
|
||||||
|
object-fit: contain;
|
||||||
|
border-radius: var(--border-radius-sm);
|
||||||
|
transition: filter 0.2s ease, opacity 0.3s ease;
|
||||||
|
opacity: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__media.loaded {
|
||||||
|
opacity: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__media.blurred {
|
||||||
|
filter: blur(25px);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* NSFW notice for main media - redesigned to avoid conflicts with card.css */
|
||||||
|
.showcase__nsfw-notice {
|
||||||
|
position: absolute;
|
||||||
|
top: 50%;
|
||||||
|
left: 50%;
|
||||||
|
transform: translate(-50%, -50%);
|
||||||
|
text-align: center;
|
||||||
|
color: white;
|
||||||
|
background: rgba(0, 0, 0, 0.75);
|
||||||
|
padding: var(--space-4) var(--space-5);
|
||||||
|
border-radius: var(--border-radius-base);
|
||||||
|
border: 1px solid rgba(255, 255, 255, 0.1);
|
||||||
|
backdrop-filter: blur(8px);
|
||||||
|
-webkit-backdrop-filter: blur(8px);
|
||||||
|
z-index: 5;
|
||||||
|
user-select: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__nsfw-notice-content {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
align-items: center;
|
||||||
|
gap: var(--space-3);
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__nsfw-notice-text {
|
||||||
|
margin: 0;
|
||||||
|
font-size: 1.1em;
|
||||||
|
font-weight: 600;
|
||||||
|
letter-spacing: 0.02em;
|
||||||
|
text-shadow: 0 2px 4px rgba(0, 0, 0, 0.5);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Show content button in NSFW notice - styled like card.css show-content-btn */
|
||||||
|
.showcase__nsfw-show-btn {
|
||||||
|
background: var(--lora-accent);
|
||||||
|
color: white;
|
||||||
|
border: none;
|
||||||
|
border-radius: var(--border-radius-xs);
|
||||||
|
padding: 6px var(--space-3);
|
||||||
|
cursor: pointer;
|
||||||
|
font-size: 0.9em;
|
||||||
|
font-weight: 500;
|
||||||
|
transition: background-color 0.2s ease, transform 0.2s ease;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 6px;
|
||||||
|
text-shadow: 0 1px 2px rgba(0, 0, 0, 0.2);
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__nsfw-show-btn:hover {
|
||||||
|
background: oklch(58% 0.28 256);
|
||||||
|
transform: scale(1.05);
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__nsfw-show-btn i {
|
||||||
|
font-size: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Control button active state for blur toggle */
|
||||||
|
.showcase__control-btn.hidden {
|
||||||
|
display: none !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Video indicator for thumbnails */
|
||||||
|
.thumbnail-rail__video-indicator {
|
||||||
|
position: absolute;
|
||||||
|
top: 50%;
|
||||||
|
left: 50%;
|
||||||
|
transform: translate(-50%, -50%);
|
||||||
|
color: white;
|
||||||
|
font-size: 1.5rem;
|
||||||
|
text-shadow: 0 2px 4px rgba(0, 0, 0, 0.5);
|
||||||
|
pointer-events: none;
|
||||||
|
z-index: 2;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* NSFW blur for thumbnails */
|
||||||
|
.thumbnail-rail__item.nsfw-blur img {
|
||||||
|
filter: blur(8px);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Navigation arrows */
|
||||||
|
.showcase__nav {
|
||||||
|
position: absolute;
|
||||||
|
top: 50%;
|
||||||
|
transform: translateY(-50%);
|
||||||
|
width: 48px;
|
||||||
|
height: 48px;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
background: rgba(0, 0, 0, 0.4);
|
||||||
|
border: none;
|
||||||
|
border-radius: 50%;
|
||||||
|
color: white;
|
||||||
|
font-size: 1.2rem;
|
||||||
|
cursor: pointer;
|
||||||
|
opacity: 0;
|
||||||
|
transition: opacity 0.2s ease, background-color 0.2s ease, transform 0.2s ease;
|
||||||
|
z-index: 5;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase:hover .showcase__nav {
|
||||||
|
opacity: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__nav:hover {
|
||||||
|
background: rgba(0, 0, 0, 0.6);
|
||||||
|
transform: translateY(-50%) scale(1.05);
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__nav--prev {
|
||||||
|
left: var(--space-3);
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__nav--next {
|
||||||
|
right: var(--space-3);
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__nav:disabled {
|
||||||
|
opacity: 0.3 !important;
|
||||||
|
cursor: not-allowed;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Image controls overlay */
|
||||||
|
.showcase__controls {
|
||||||
|
position: absolute;
|
||||||
|
top: var(--space-2);
|
||||||
|
right: var(--space-2);
|
||||||
|
display: flex;
|
||||||
|
gap: var(--space-1);
|
||||||
|
opacity: 0;
|
||||||
|
transform: translateY(-5px);
|
||||||
|
transition: opacity 0.2s ease, transform 0.2s ease;
|
||||||
|
z-index: 5;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Image counter */
|
||||||
|
.showcase__counter {
|
||||||
|
position: absolute;
|
||||||
|
top: var(--space-2);
|
||||||
|
left: var(--space-2);
|
||||||
|
background: rgba(0, 0, 0, 0.6);
|
||||||
|
color: white;
|
||||||
|
padding: var(--space-1) var(--space-2);
|
||||||
|
border-radius: var(--border-radius-xs);
|
||||||
|
font-size: 0.85em;
|
||||||
|
font-weight: 500;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: var(--space-1);
|
||||||
|
opacity: 0.8;
|
||||||
|
transition: opacity 0.2s ease;
|
||||||
|
pointer-events: none;
|
||||||
|
font-variant-numeric: tabular-nums;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__image-wrapper:hover .showcase__counter {
|
||||||
|
opacity: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__counter-current {
|
||||||
|
font-weight: 600;
|
||||||
|
min-width: 2ch;
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__counter-separator {
|
||||||
|
opacity: 0.6;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__counter-total {
|
||||||
|
opacity: 0.8;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__image-wrapper:hover .showcase__controls {
|
||||||
|
opacity: 1;
|
||||||
|
transform: translateY(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__control-btn {
|
||||||
|
width: 36px;
|
||||||
|
height: 36px;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
background: rgba(0, 0, 0, 0.5);
|
||||||
|
border: none;
|
||||||
|
border-radius: 50%;
|
||||||
|
color: white;
|
||||||
|
font-size: 0.9rem;
|
||||||
|
cursor: pointer;
|
||||||
|
transition: background-color 0.2s ease, transform 0.2s ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__control-btn:hover {
|
||||||
|
background: rgba(0, 0, 0, 0.7);
|
||||||
|
transform: scale(1.1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__control-btn--primary:hover {
|
||||||
|
background: var(--lora-accent);
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__control-btn--danger:hover {
|
||||||
|
background: var(--lora-error);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Active state for toggle buttons */
|
||||||
|
.showcase__control-btn.active {
|
||||||
|
background: var(--lora-accent);
|
||||||
|
color: white;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__control-btn.active:hover {
|
||||||
|
background: var(--lora-accent-hover, #3182ce);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Params panel (slide up) */
|
||||||
|
.showcase__params {
|
||||||
|
position: absolute;
|
||||||
|
bottom: 0;
|
||||||
|
left: 0;
|
||||||
|
right: 0;
|
||||||
|
background: var(--bg-color);
|
||||||
|
border-top: 1px solid var(--lora-border);
|
||||||
|
padding: var(--space-3);
|
||||||
|
transform: translateY(100%);
|
||||||
|
transition: transform 0.3s cubic-bezier(0.175, 0.885, 0.32, 1.275);
|
||||||
|
z-index: 6;
|
||||||
|
max-height: 50%;
|
||||||
|
overflow-y: auto;
|
||||||
|
box-shadow: 0 -4px 12px rgba(0, 0, 0, 0.15);
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__params.visible {
|
||||||
|
transform: translateY(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__params-header {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: space-between;
|
||||||
|
margin-bottom: var(--space-2);
|
||||||
|
padding-bottom: var(--space-2);
|
||||||
|
border-bottom: 1px solid var(--lora-border);
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__params-title {
|
||||||
|
font-weight: 600;
|
||||||
|
font-size: 0.95em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__params-close {
|
||||||
|
background: transparent;
|
||||||
|
border: none;
|
||||||
|
color: var(--text-color);
|
||||||
|
font-size: 1.2rem;
|
||||||
|
cursor: pointer;
|
||||||
|
padding: 0;
|
||||||
|
width: 28px;
|
||||||
|
height: 28px;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
border-radius: 50%;
|
||||||
|
transition: background-color 0.2s;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__params-close:hover {
|
||||||
|
background: var(--lora-border);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Prompt display */
|
||||||
|
.showcase__prompt {
|
||||||
|
background: var(--lora-surface);
|
||||||
|
border: 1px solid var(--lora-border);
|
||||||
|
border-radius: var(--border-radius-xs);
|
||||||
|
padding: var(--space-2);
|
||||||
|
margin-bottom: var(--space-2);
|
||||||
|
position: relative;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__prompt-label {
|
||||||
|
font-size: 0.8em;
|
||||||
|
color: var(--text-color);
|
||||||
|
opacity: 0.7;
|
||||||
|
margin-bottom: var(--space-1);
|
||||||
|
text-transform: uppercase;
|
||||||
|
letter-spacing: 0.05em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__prompt-text {
|
||||||
|
font-family: monospace;
|
||||||
|
font-size: 0.85em;
|
||||||
|
line-height: 1.5;
|
||||||
|
max-height: 100px;
|
||||||
|
overflow-y: auto;
|
||||||
|
word-break: break-word;
|
||||||
|
white-space: pre-wrap;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__prompt-copy {
|
||||||
|
position: absolute;
|
||||||
|
top: var(--space-1);
|
||||||
|
right: var(--space-1);
|
||||||
|
background: transparent;
|
||||||
|
border: none;
|
||||||
|
color: var(--text-color);
|
||||||
|
opacity: 0.5;
|
||||||
|
cursor: pointer;
|
||||||
|
padding: var(--space-1);
|
||||||
|
border-radius: var(--border-radius-xs);
|
||||||
|
transition: opacity 0.2s, background-color 0.2s;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__prompt-copy:hover {
|
||||||
|
opacity: 1;
|
||||||
|
background: var(--lora-border);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Loading state */
|
||||||
|
.showcase__loading {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
height: 100%;
|
||||||
|
color: var(--text-color);
|
||||||
|
opacity: 0.7;
|
||||||
|
gap: var(--space-2);
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__loading i {
|
||||||
|
font-size: 2rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Skeleton loading state */
|
||||||
|
.showcase__skeleton {
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
background: var(--lora-surface);
|
||||||
|
}
|
||||||
|
|
||||||
|
.skeleton-animation {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
gap: var(--space-3);
|
||||||
|
color: var(--text-color);
|
||||||
|
opacity: 0.6;
|
||||||
|
}
|
||||||
|
|
||||||
|
.skeleton-spinner {
|
||||||
|
font-size: 2.5rem;
|
||||||
|
color: var(--lora-accent);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Error state */
|
||||||
|
.showcase__error {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
height: 100%;
|
||||||
|
color: var(--lora-error);
|
||||||
|
gap: var(--space-2);
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__error i {
|
||||||
|
font-size: 2rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__error p {
|
||||||
|
margin: 0;
|
||||||
|
font-size: 0.9em;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Empty state */
|
||||||
|
.showcase__empty {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
height: 100%;
|
||||||
|
color: var(--text-color);
|
||||||
|
opacity: 0.6;
|
||||||
|
text-align: center;
|
||||||
|
padding: var(--space-4);
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__empty i {
|
||||||
|
font-size: 3rem;
|
||||||
|
margin-bottom: var(--space-2);
|
||||||
|
opacity: 0.5;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Mobile adjustments */
|
||||||
|
@media (max-width: 768px) {
|
||||||
|
.showcase__main {
|
||||||
|
padding: var(--space-2);
|
||||||
|
min-height: 50vh;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__image {
|
||||||
|
max-height: 50vh;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__nav {
|
||||||
|
width: 40px;
|
||||||
|
height: 40px;
|
||||||
|
opacity: 0.7;
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__nav--prev {
|
||||||
|
left: var(--space-1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__nav--next {
|
||||||
|
right: var(--space-1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ============================================
|
||||||
|
Lazy Loading Styles
|
||||||
|
============================================ */
|
||||||
|
|
||||||
|
/* Thumbnail lazy loading placeholder */
|
||||||
|
.thumbnail-rail__item img {
|
||||||
|
opacity: 0;
|
||||||
|
transition: opacity 0.3s ease;
|
||||||
|
background: var(--lora-surface);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Loaded state */
|
||||||
|
.thumbnail-rail__item img.loaded {
|
||||||
|
opacity: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Loading state with skeleton animation */
|
||||||
|
.thumbnail-rail__item img.lazy-load {
|
||||||
|
background: linear-gradient(
|
||||||
|
90deg,
|
||||||
|
var(--lora-surface) 25%,
|
||||||
|
var(--lora-border) 50%,
|
||||||
|
var(--lora-surface) 75%
|
||||||
|
);
|
||||||
|
background-size: 200% 100%;
|
||||||
|
animation: lazy-loading-shimmer 1.5s infinite;
|
||||||
|
}
|
||||||
|
|
||||||
|
@keyframes lazy-loading-shimmer {
|
||||||
|
0% {
|
||||||
|
background-position: 200% 0;
|
||||||
|
}
|
||||||
|
100% {
|
||||||
|
background-position: -200% 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Error state for failed loads */
|
||||||
|
.thumbnail-rail__item img.load-error {
|
||||||
|
opacity: 0.3;
|
||||||
|
background: var(--lora-error);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Cached image - subtle highlight */
|
||||||
|
.thumbnail-rail__item img[data-cached="true"] {
|
||||||
|
border: 1px solid var(--lora-accent);
|
||||||
|
}
|
||||||
153
static/css/components/model-modal/tabs.css
Normal file
153
static/css/components/model-modal/tabs.css
Normal file
@@ -0,0 +1,153 @@
|
|||||||
|
/* Tabs - Content Area */
|
||||||
|
|
||||||
|
.tabs {
|
||||||
|
display: flex;
|
||||||
|
border-bottom: 1px solid var(--lora-border);
|
||||||
|
background: var(--lora-surface);
|
||||||
|
}
|
||||||
|
|
||||||
|
.tab {
|
||||||
|
flex: 1;
|
||||||
|
display: inline-flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
gap: var(--space-1);
|
||||||
|
padding: var(--space-2) var(--space-1);
|
||||||
|
background: transparent;
|
||||||
|
border: none;
|
||||||
|
border-bottom: 2px solid transparent;
|
||||||
|
color: var(--text-color);
|
||||||
|
cursor: pointer;
|
||||||
|
font-size: 0.85em;
|
||||||
|
font-weight: 500;
|
||||||
|
transition: all 0.2s;
|
||||||
|
opacity: 0.7;
|
||||||
|
}
|
||||||
|
|
||||||
|
.tab:hover {
|
||||||
|
opacity: 1;
|
||||||
|
background: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.05);
|
||||||
|
}
|
||||||
|
|
||||||
|
.tab.active {
|
||||||
|
border-bottom-color: var(--lora-accent);
|
||||||
|
opacity: 1;
|
||||||
|
font-weight: 600;
|
||||||
|
}
|
||||||
|
|
||||||
|
.tab__badge {
|
||||||
|
display: inline-flex;
|
||||||
|
align-items: center;
|
||||||
|
padding: 2px 6px;
|
||||||
|
border-radius: var(--border-radius-xs);
|
||||||
|
background: var(--badge-update-bg);
|
||||||
|
color: var(--badge-update-text);
|
||||||
|
font-size: 0.65em;
|
||||||
|
font-weight: 600;
|
||||||
|
letter-spacing: 0.05em;
|
||||||
|
text-transform: uppercase;
|
||||||
|
}
|
||||||
|
|
||||||
|
.tab__badge--pulse {
|
||||||
|
animation: tabBadgePulse 2s ease-in-out infinite;
|
||||||
|
}
|
||||||
|
|
||||||
|
@keyframes tabBadgePulse {
|
||||||
|
0%, 100% {
|
||||||
|
box-shadow: 0 0 0 0 color-mix(in oklch, var(--badge-update-bg) 50%, transparent);
|
||||||
|
}
|
||||||
|
50% {
|
||||||
|
box-shadow: 0 0 0 4px color-mix(in oklch, var(--badge-update-bg) 0%, transparent);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Tab content */
|
||||||
|
.tab-panels {
|
||||||
|
flex: 1;
|
||||||
|
overflow-y: auto;
|
||||||
|
padding: var(--space-2);
|
||||||
|
}
|
||||||
|
|
||||||
|
.tab-panel {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.tab-panel.active {
|
||||||
|
display: block;
|
||||||
|
animation: tabPanelFadeIn 0.2s ease-out;
|
||||||
|
}
|
||||||
|
|
||||||
|
@keyframes tabPanelFadeIn {
|
||||||
|
from {
|
||||||
|
opacity: 0;
|
||||||
|
transform: translateY(5px);
|
||||||
|
}
|
||||||
|
to {
|
||||||
|
opacity: 1;
|
||||||
|
transform: translateY(0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Accordion within tab panels */
|
||||||
|
.accordion {
|
||||||
|
border: 1px solid var(--lora-border);
|
||||||
|
border-radius: var(--border-radius-sm);
|
||||||
|
overflow: hidden;
|
||||||
|
margin-bottom: var(--space-2);
|
||||||
|
}
|
||||||
|
|
||||||
|
.accordion__header {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: space-between;
|
||||||
|
padding: var(--space-2) var(--space-3);
|
||||||
|
background: var(--lora-surface);
|
||||||
|
cursor: pointer;
|
||||||
|
transition: background-color 0.2s;
|
||||||
|
}
|
||||||
|
|
||||||
|
.accordion__header:hover {
|
||||||
|
background: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.05);
|
||||||
|
}
|
||||||
|
|
||||||
|
.accordion__title {
|
||||||
|
font-weight: 600;
|
||||||
|
font-size: 0.9em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.accordion__icon {
|
||||||
|
transition: transform 0.3s ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
.accordion.expanded .accordion__icon {
|
||||||
|
transform: rotate(180deg);
|
||||||
|
}
|
||||||
|
|
||||||
|
.accordion__content {
|
||||||
|
max-height: 0;
|
||||||
|
overflow: hidden;
|
||||||
|
transition: max-height 0.3s ease-out;
|
||||||
|
}
|
||||||
|
|
||||||
|
.accordion.expanded .accordion__content {
|
||||||
|
max-height: 500px; /* Adjust based on content */
|
||||||
|
}
|
||||||
|
|
||||||
|
.accordion__body {
|
||||||
|
padding: var(--space-3);
|
||||||
|
border-top: 1px solid var(--lora-border);
|
||||||
|
font-size: 0.9em;
|
||||||
|
line-height: 1.6;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Mobile adjustments */
|
||||||
|
@media (max-width: 768px) {
|
||||||
|
.tab {
|
||||||
|
font-size: 0.8em;
|
||||||
|
padding: var(--space-2) var(--space-1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.tab__badge {
|
||||||
|
display: none; /* Hide badges on small screens */
|
||||||
|
}
|
||||||
|
}
|
||||||
151
static/css/components/model-modal/thumbnail-rail.css
Normal file
151
static/css/components/model-modal/thumbnail-rail.css
Normal file
@@ -0,0 +1,151 @@
|
|||||||
|
/* Thumbnail Rail - Bottom of Showcase */
|
||||||
|
|
||||||
|
.thumbnail-rail {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: var(--space-2);
|
||||||
|
padding: var(--space-2) var(--space-3);
|
||||||
|
background: var(--lora-surface);
|
||||||
|
border-top: 1px solid var(--lora-border);
|
||||||
|
overflow-x: auto;
|
||||||
|
scrollbar-width: thin;
|
||||||
|
scrollbar-color: var(--lora-border) transparent;
|
||||||
|
}
|
||||||
|
|
||||||
|
.thumbnail-rail::-webkit-scrollbar {
|
||||||
|
height: 6px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.thumbnail-rail::-webkit-scrollbar-track {
|
||||||
|
background: transparent;
|
||||||
|
}
|
||||||
|
|
||||||
|
.thumbnail-rail::-webkit-scrollbar-thumb {
|
||||||
|
background-color: var(--lora-border);
|
||||||
|
border-radius: 3px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Thumbnail item */
|
||||||
|
.thumbnail-rail__item {
|
||||||
|
flex-shrink: 0;
|
||||||
|
width: 64px;
|
||||||
|
height: 64px;
|
||||||
|
border-radius: var(--border-radius-xs);
|
||||||
|
overflow: hidden;
|
||||||
|
cursor: pointer;
|
||||||
|
position: relative;
|
||||||
|
border: 2px solid transparent;
|
||||||
|
transition: border-color 0.2s ease, transform 0.2s ease;
|
||||||
|
background: var(--lora-surface);
|
||||||
|
}
|
||||||
|
|
||||||
|
.thumbnail-rail__item img {
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
object-fit: cover;
|
||||||
|
opacity: 0;
|
||||||
|
transition: opacity 0.3s ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
.thumbnail-rail__item img.loaded {
|
||||||
|
opacity: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
.thumbnail-rail__item:hover {
|
||||||
|
border-color: var(--lora-border);
|
||||||
|
transform: translateY(-2px);
|
||||||
|
}
|
||||||
|
|
||||||
|
.thumbnail-rail__item.active {
|
||||||
|
border-color: var(--lora-accent);
|
||||||
|
box-shadow: 0 0 0 2px oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.3);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* NSFW blur for thumbnails - BEM naming to avoid conflicts with global .nsfw-blur */
|
||||||
|
.thumbnail-rail__item--nsfw-blurred img {
|
||||||
|
filter: blur(8px);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Legacy support for old class names (deprecated) */
|
||||||
|
.thumbnail-rail__item.nsfw img {
|
||||||
|
filter: blur(8px);
|
||||||
|
}
|
||||||
|
|
||||||
|
.thumbnail-rail__nsfw-badge {
|
||||||
|
position: absolute;
|
||||||
|
top: 50%;
|
||||||
|
left: 50%;
|
||||||
|
transform: translate(-50%, -50%);
|
||||||
|
background: rgba(0, 0, 0, 0.7);
|
||||||
|
color: white;
|
||||||
|
font-size: 0.65em;
|
||||||
|
padding: 2px 6px;
|
||||||
|
border-radius: var(--border-radius-xs);
|
||||||
|
text-transform: uppercase;
|
||||||
|
letter-spacing: 0.05em;
|
||||||
|
pointer-events: none;
|
||||||
|
user-select: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Add button */
|
||||||
|
.thumbnail-rail__add {
|
||||||
|
flex-shrink: 0;
|
||||||
|
width: 64px;
|
||||||
|
height: 64px;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
gap: 4px;
|
||||||
|
background: var(--bg-color);
|
||||||
|
border: 2px dashed var(--lora-border);
|
||||||
|
border-radius: var(--border-radius-xs);
|
||||||
|
color: var(--text-color);
|
||||||
|
opacity: 0.7;
|
||||||
|
cursor: pointer;
|
||||||
|
transition: all 0.2s ease;
|
||||||
|
font-size: 0.75em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.thumbnail-rail__add:hover {
|
||||||
|
border-color: var(--lora-accent);
|
||||||
|
color: var(--lora-accent);
|
||||||
|
opacity: 1;
|
||||||
|
background: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.05);
|
||||||
|
}
|
||||||
|
|
||||||
|
.thumbnail-rail__add i {
|
||||||
|
font-size: 1.2rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Upload area (inline expansion) */
|
||||||
|
.thumbnail-rail__upload {
|
||||||
|
display: none;
|
||||||
|
position: absolute;
|
||||||
|
bottom: 100%;
|
||||||
|
left: 0;
|
||||||
|
right: 0;
|
||||||
|
padding: var(--space-3);
|
||||||
|
background: var(--lora-surface);
|
||||||
|
border-top: 1px solid var(--lora-border);
|
||||||
|
box-shadow: 0 -4px 12px rgba(0, 0, 0, 0.1);
|
||||||
|
z-index: 7;
|
||||||
|
}
|
||||||
|
|
||||||
|
.thumbnail-rail__upload.visible {
|
||||||
|
display: block;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Mobile adjustments */
|
||||||
|
@media (max-width: 768px) {
|
||||||
|
.thumbnail-rail {
|
||||||
|
padding: var(--space-2);
|
||||||
|
gap: var(--space-1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.thumbnail-rail__item,
|
||||||
|
.thumbnail-rail__add {
|
||||||
|
width: 56px;
|
||||||
|
height: 56px;
|
||||||
|
}
|
||||||
|
}
|
||||||
163
static/css/components/model-modal/upload.css
Normal file
163
static/css/components/model-modal/upload.css
Normal file
@@ -0,0 +1,163 @@
|
|||||||
|
/* Upload Area Styles */
|
||||||
|
|
||||||
|
.upload-area {
|
||||||
|
position: absolute;
|
||||||
|
bottom: 0;
|
||||||
|
left: 0;
|
||||||
|
right: 0;
|
||||||
|
background: var(--card-bg);
|
||||||
|
border-top: 1px solid var(--lora-border);
|
||||||
|
transform: translateY(100%);
|
||||||
|
transition: transform 0.3s ease;
|
||||||
|
z-index: 10;
|
||||||
|
max-height: 50%;
|
||||||
|
}
|
||||||
|
|
||||||
|
.upload-area.visible {
|
||||||
|
transform: translateY(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
.upload-area__content {
|
||||||
|
padding: var(--space-4);
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: var(--space-3);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Dropzone */
|
||||||
|
.upload-area__dropzone {
|
||||||
|
border: 2px dashed var(--lora-border);
|
||||||
|
border-radius: var(--border-radius-md);
|
||||||
|
padding: var(--space-6);
|
||||||
|
text-align: center;
|
||||||
|
cursor: pointer;
|
||||||
|
transition: all 0.2s;
|
||||||
|
position: relative;
|
||||||
|
}
|
||||||
|
|
||||||
|
.upload-area__dropzone:hover {
|
||||||
|
border-color: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.5);
|
||||||
|
background: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.02);
|
||||||
|
}
|
||||||
|
|
||||||
|
.upload-area__dropzone.dragover {
|
||||||
|
border-color: var(--lora-accent);
|
||||||
|
background: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.08);
|
||||||
|
}
|
||||||
|
|
||||||
|
.upload-area__input {
|
||||||
|
position: absolute;
|
||||||
|
inset: 0;
|
||||||
|
opacity: 0;
|
||||||
|
cursor: pointer;
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Placeholder */
|
||||||
|
.upload-area__placeholder {
|
||||||
|
pointer-events: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.upload-area__placeholder i {
|
||||||
|
font-size: 2.5rem;
|
||||||
|
color: var(--lora-accent);
|
||||||
|
opacity: 0.6;
|
||||||
|
margin-bottom: var(--space-2);
|
||||||
|
}
|
||||||
|
|
||||||
|
.upload-area__title {
|
||||||
|
margin: 0 0 var(--space-1);
|
||||||
|
font-size: 1em;
|
||||||
|
font-weight: 500;
|
||||||
|
color: var(--text-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.upload-area__hint {
|
||||||
|
margin: 0;
|
||||||
|
font-size: 0.8em;
|
||||||
|
opacity: 0.6;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Uploading State */
|
||||||
|
.upload-area__uploading {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
padding: var(--space-4);
|
||||||
|
}
|
||||||
|
|
||||||
|
.upload-area__uploading i {
|
||||||
|
font-size: 2rem;
|
||||||
|
color: var(--lora-accent);
|
||||||
|
margin-bottom: var(--space-2);
|
||||||
|
}
|
||||||
|
|
||||||
|
.upload-area__uploading p {
|
||||||
|
margin: 0;
|
||||||
|
color: var(--text-color);
|
||||||
|
opacity: 0.8;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Actions */
|
||||||
|
.upload-area__actions {
|
||||||
|
display: flex;
|
||||||
|
justify-content: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.upload-area__cancel {
|
||||||
|
padding: var(--space-2) var(--space-4);
|
||||||
|
background: transparent;
|
||||||
|
border: 1px solid var(--lora-border);
|
||||||
|
border-radius: var(--border-radius-sm);
|
||||||
|
color: var(--text-color);
|
||||||
|
font-size: 0.9em;
|
||||||
|
cursor: pointer;
|
||||||
|
transition: all 0.2s;
|
||||||
|
}
|
||||||
|
|
||||||
|
.upload-area__cancel:hover {
|
||||||
|
border-color: var(--lora-error);
|
||||||
|
color: var(--lora-error);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Add Button in Empty State */
|
||||||
|
.showcase__add-btn {
|
||||||
|
margin-top: var(--space-4);
|
||||||
|
padding: var(--space-2) var(--space-4);
|
||||||
|
background: var(--lora-accent);
|
||||||
|
border: none;
|
||||||
|
border-radius: var(--border-radius-sm);
|
||||||
|
color: white;
|
||||||
|
font-size: 0.9em;
|
||||||
|
cursor: pointer;
|
||||||
|
transition: all 0.2s;
|
||||||
|
display: inline-flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: var(--space-2);
|
||||||
|
}
|
||||||
|
|
||||||
|
.showcase__add-btn:hover {
|
||||||
|
opacity: 0.9;
|
||||||
|
transform: translateY(-1px);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Mobile Adjustments */
|
||||||
|
@media (max-width: 768px) {
|
||||||
|
.upload-area {
|
||||||
|
max-height: 60%;
|
||||||
|
}
|
||||||
|
|
||||||
|
.upload-area__content {
|
||||||
|
padding: var(--space-3);
|
||||||
|
}
|
||||||
|
|
||||||
|
.upload-area__dropzone {
|
||||||
|
padding: var(--space-4);
|
||||||
|
}
|
||||||
|
|
||||||
|
.upload-area__placeholder i {
|
||||||
|
font-size: 2rem;
|
||||||
|
}
|
||||||
|
}
|
||||||
378
static/css/components/model-modal/versions.css
Normal file
378
static/css/components/model-modal/versions.css
Normal file
@@ -0,0 +1,378 @@
|
|||||||
|
/* Versions Tab Styles */
|
||||||
|
|
||||||
|
.versions-loading,
|
||||||
|
.versions-error,
|
||||||
|
.versions-empty {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
padding: var(--space-8) var(--space-4);
|
||||||
|
text-align: center;
|
||||||
|
color: var(--text-color);
|
||||||
|
opacity: 0.7;
|
||||||
|
}
|
||||||
|
|
||||||
|
.versions-loading i,
|
||||||
|
.versions-error i,
|
||||||
|
.versions-empty i {
|
||||||
|
font-size: 2rem;
|
||||||
|
margin-bottom: var(--space-3);
|
||||||
|
opacity: 0.5;
|
||||||
|
}
|
||||||
|
|
||||||
|
.versions-error i {
|
||||||
|
color: var(--lora-error);
|
||||||
|
opacity: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
.versions-empty-filter {
|
||||||
|
opacity: 0.6;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Toolbar */
|
||||||
|
.versions-toolbar {
|
||||||
|
padding: var(--space-3);
|
||||||
|
background: var(--lora-surface);
|
||||||
|
border-bottom: 1px solid var(--lora-border);
|
||||||
|
margin: calc(-1 * var(--space-2)) calc(-1 * var(--space-2)) var(--space-2);
|
||||||
|
}
|
||||||
|
|
||||||
|
.versions-toolbar-info-heading {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: var(--space-2);
|
||||||
|
margin-bottom: var(--space-1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.versions-toolbar-info-heading h3 {
|
||||||
|
margin: 0;
|
||||||
|
font-size: 1em;
|
||||||
|
font-weight: 600;
|
||||||
|
}
|
||||||
|
|
||||||
|
.versions-toolbar-info p {
|
||||||
|
margin: 0;
|
||||||
|
font-size: 0.85em;
|
||||||
|
opacity: 0.7;
|
||||||
|
}
|
||||||
|
|
||||||
|
.versions-toolbar-actions {
|
||||||
|
margin-top: var(--space-2);
|
||||||
|
display: flex;
|
||||||
|
gap: var(--space-2);
|
||||||
|
}
|
||||||
|
|
||||||
|
.versions-filter-toggle {
|
||||||
|
width: 28px;
|
||||||
|
height: 28px;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
background: transparent;
|
||||||
|
border: 1px solid var(--lora-border);
|
||||||
|
border-radius: var(--border-radius-sm);
|
||||||
|
color: var(--text-color);
|
||||||
|
cursor: pointer;
|
||||||
|
opacity: 0.6;
|
||||||
|
transition: all 0.2s;
|
||||||
|
}
|
||||||
|
|
||||||
|
.versions-filter-toggle:hover {
|
||||||
|
opacity: 1;
|
||||||
|
background: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.versions-filter-toggle.active {
|
||||||
|
opacity: 1;
|
||||||
|
background: var(--lora-accent);
|
||||||
|
border-color: var(--lora-accent);
|
||||||
|
color: white;
|
||||||
|
}
|
||||||
|
|
||||||
|
.versions-toolbar-btn {
|
||||||
|
padding: var(--space-1) var(--space-3);
|
||||||
|
border-radius: var(--border-radius-sm);
|
||||||
|
font-size: 0.85em;
|
||||||
|
cursor: pointer;
|
||||||
|
transition: all 0.2s;
|
||||||
|
border: 1px solid var(--lora-border);
|
||||||
|
}
|
||||||
|
|
||||||
|
.versions-toolbar-btn-primary {
|
||||||
|
background: var(--lora-accent);
|
||||||
|
border-color: var(--lora-accent);
|
||||||
|
color: white;
|
||||||
|
}
|
||||||
|
|
||||||
|
.versions-toolbar-btn-primary:hover {
|
||||||
|
opacity: 0.9;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Version Cards List */
|
||||||
|
.versions-list {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: var(--space-2);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Version Card */
|
||||||
|
.version-card {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: 80px 1fr auto;
|
||||||
|
gap: var(--space-3);
|
||||||
|
padding: var(--space-3);
|
||||||
|
background: var(--lora-surface);
|
||||||
|
border: 1px solid var(--lora-border);
|
||||||
|
border-radius: var(--border-radius-sm);
|
||||||
|
transition: all 0.2s;
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-card:hover {
|
||||||
|
border-color: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.3);
|
||||||
|
box-shadow: 0 2px 8px rgba(0, 0, 0, 0.05);
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-card.is-current {
|
||||||
|
border-color: var(--lora-accent);
|
||||||
|
background: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.05);
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-card.is-clickable {
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-card.is-clickable:hover {
|
||||||
|
border-color: var(--lora-accent);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Version Media */
|
||||||
|
.version-media {
|
||||||
|
width: 80px;
|
||||||
|
height: 80px;
|
||||||
|
border-radius: var(--border-radius-xs);
|
||||||
|
overflow: hidden;
|
||||||
|
background: var(--bg-color);
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-media img,
|
||||||
|
.version-media video {
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
object-fit: cover;
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-media-placeholder {
|
||||||
|
font-size: 0.75em;
|
||||||
|
color: var(--text-color);
|
||||||
|
opacity: 0.5;
|
||||||
|
text-align: center;
|
||||||
|
padding: var(--space-1);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Version Details */
|
||||||
|
.version-details {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
justify-content: center;
|
||||||
|
min-width: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-name {
|
||||||
|
font-weight: 600;
|
||||||
|
font-size: 0.95em;
|
||||||
|
margin-bottom: var(--space-1);
|
||||||
|
white-space: nowrap;
|
||||||
|
overflow: hidden;
|
||||||
|
text-overflow: ellipsis;
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-badges {
|
||||||
|
display: flex;
|
||||||
|
flex-wrap: wrap;
|
||||||
|
gap: var(--space-1);
|
||||||
|
margin-bottom: var(--space-1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-badge {
|
||||||
|
display: inline-flex;
|
||||||
|
align-items: center;
|
||||||
|
padding: 2px 8px;
|
||||||
|
border-radius: var(--border-radius-xs);
|
||||||
|
font-size: 0.7em;
|
||||||
|
font-weight: 600;
|
||||||
|
text-transform: uppercase;
|
||||||
|
letter-spacing: 0.05em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-badge-current {
|
||||||
|
background: var(--lora-accent);
|
||||||
|
color: white;
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-badge-success {
|
||||||
|
background: var(--lora-success);
|
||||||
|
color: white;
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-badge-info {
|
||||||
|
background: var(--badge-update-bg);
|
||||||
|
color: var(--badge-update-text);
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-badge-muted {
|
||||||
|
background: var(--lora-border);
|
||||||
|
color: var(--text-color);
|
||||||
|
opacity: 0.7;
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-meta {
|
||||||
|
font-size: 0.8em;
|
||||||
|
opacity: 0.7;
|
||||||
|
display: flex;
|
||||||
|
flex-wrap: wrap;
|
||||||
|
align-items: center;
|
||||||
|
gap: var(--space-1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-meta-separator {
|
||||||
|
opacity: 0.5;
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-meta-primary {
|
||||||
|
color: var(--lora-accent);
|
||||||
|
font-weight: 500;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Version Actions */
|
||||||
|
.version-actions {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: var(--space-1);
|
||||||
|
justify-content: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-action {
|
||||||
|
padding: var(--space-1) var(--space-3);
|
||||||
|
border-radius: var(--border-radius-xs);
|
||||||
|
font-size: 0.8em;
|
||||||
|
font-weight: 500;
|
||||||
|
cursor: pointer;
|
||||||
|
transition: all 0.2s;
|
||||||
|
border: 1px solid transparent;
|
||||||
|
white-space: nowrap;
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-action-primary {
|
||||||
|
background: var(--lora-accent);
|
||||||
|
color: white;
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-action-primary:hover {
|
||||||
|
opacity: 0.9;
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-action-danger {
|
||||||
|
background: transparent;
|
||||||
|
border-color: var(--lora-error);
|
||||||
|
color: var(--lora-error);
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-action-danger:hover {
|
||||||
|
background: var(--lora-error);
|
||||||
|
color: white;
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-action-ghost {
|
||||||
|
background: transparent;
|
||||||
|
border-color: var(--lora-border);
|
||||||
|
color: var(--text-color);
|
||||||
|
opacity: 0.7;
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-action-ghost:hover {
|
||||||
|
opacity: 1;
|
||||||
|
border-color: var(--text-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-action:disabled {
|
||||||
|
opacity: 0.5;
|
||||||
|
cursor: not-allowed;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Delete Modal for Version */
|
||||||
|
.version-delete-modal .delete-model-info {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: 100px 1fr;
|
||||||
|
gap: var(--space-3);
|
||||||
|
margin: var(--space-3) 0;
|
||||||
|
padding: var(--space-3);
|
||||||
|
background: var(--lora-surface);
|
||||||
|
border-radius: var(--border-radius-sm);
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-delete-modal .delete-preview {
|
||||||
|
width: 100px;
|
||||||
|
height: 100px;
|
||||||
|
border-radius: var(--border-radius-xs);
|
||||||
|
overflow: hidden;
|
||||||
|
background: var(--bg-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-delete-modal .delete-preview img,
|
||||||
|
.version-delete-modal .delete-preview video {
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
object-fit: cover;
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-delete-modal .delete-info h3 {
|
||||||
|
margin: 0 0 var(--space-1);
|
||||||
|
font-size: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-delete-modal .version-base-model {
|
||||||
|
margin: 0;
|
||||||
|
opacity: 0.7;
|
||||||
|
font-size: 0.9em;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Mobile Adjustments */
|
||||||
|
@media (max-width: 768px) {
|
||||||
|
.version-card {
|
||||||
|
grid-template-columns: 60px 1fr auto;
|
||||||
|
gap: var(--space-2);
|
||||||
|
padding: var(--space-2);
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-media {
|
||||||
|
width: 60px;
|
||||||
|
height: 60px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-name {
|
||||||
|
font-size: 0.9em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-actions {
|
||||||
|
flex-direction: row;
|
||||||
|
flex-wrap: wrap;
|
||||||
|
}
|
||||||
|
|
||||||
|
.version-action {
|
||||||
|
padding: 4px 8px;
|
||||||
|
font-size: 0.75em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.versions-toolbar-actions {
|
||||||
|
flex-direction: column;
|
||||||
|
}
|
||||||
|
|
||||||
|
.versions-toolbar-btn {
|
||||||
|
width: 100%;
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -512,6 +512,10 @@
|
|||||||
|
|
||||||
.filter-preset.active .preset-delete-btn {
|
.filter-preset.active .preset-delete-btn {
|
||||||
color: white;
|
color: white;
|
||||||
|
opacity: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.filter-preset:hover.active .preset-delete-btn {
|
||||||
opacity: 0.8;
|
opacity: 0.8;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -529,13 +533,16 @@
|
|||||||
align-items: center;
|
align-items: center;
|
||||||
gap: 6px;
|
gap: 6px;
|
||||||
white-space: nowrap;
|
white-space: nowrap;
|
||||||
|
max-width: 120px; /* Prevent long names from breaking layout */
|
||||||
|
overflow: hidden;
|
||||||
|
text-overflow: ellipsis;
|
||||||
}
|
}
|
||||||
|
|
||||||
.preset-delete-btn {
|
.preset-delete-btn {
|
||||||
background: none;
|
background: none;
|
||||||
border: none;
|
border: none;
|
||||||
color: var(--text-color);
|
color: var(--text-color);
|
||||||
opacity: 0.5;
|
opacity: 0; /* Hidden by default */
|
||||||
cursor: pointer;
|
cursor: pointer;
|
||||||
padding: 4px;
|
padding: 4px;
|
||||||
display: flex;
|
display: flex;
|
||||||
@@ -546,6 +553,10 @@
|
|||||||
margin-left: auto;
|
margin-left: auto;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.filter-preset:hover .preset-delete-btn {
|
||||||
|
opacity: 0.5; /* Show on hover */
|
||||||
|
}
|
||||||
|
|
||||||
.preset-delete-btn:hover {
|
.preset-delete-btn:hover {
|
||||||
opacity: 1;
|
opacity: 1;
|
||||||
color: var(--lora-error, #e74c3c);
|
color: var(--lora-error, #e74c3c);
|
||||||
@@ -662,6 +673,57 @@
|
|||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/* Tag Logic Toggle Styles */
|
||||||
|
.filter-section-header {
|
||||||
|
display: flex;
|
||||||
|
justify-content: space-between;
|
||||||
|
align-items: center;
|
||||||
|
margin-bottom: 8px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.filter-section-header h4 {
|
||||||
|
margin: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.tag-logic-toggle {
|
||||||
|
display: flex;
|
||||||
|
background-color: var(--lora-surface);
|
||||||
|
border: 1px solid var(--border-color);
|
||||||
|
border-radius: var(--border-radius-sm);
|
||||||
|
overflow: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
.tag-logic-option {
|
||||||
|
background: none;
|
||||||
|
border: none;
|
||||||
|
padding: 2px 8px;
|
||||||
|
font-size: 11px;
|
||||||
|
cursor: pointer;
|
||||||
|
color: var(--text-color);
|
||||||
|
opacity: 0.7;
|
||||||
|
transition: all 0.2s ease;
|
||||||
|
font-weight: 500;
|
||||||
|
}
|
||||||
|
|
||||||
|
.tag-logic-option:hover {
|
||||||
|
opacity: 1;
|
||||||
|
background-color: var(--lora-surface-hover);
|
||||||
|
}
|
||||||
|
|
||||||
|
.tag-logic-option.active {
|
||||||
|
background-color: var(--lora-accent);
|
||||||
|
color: white;
|
||||||
|
opacity: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
.tag-logic-option:first-child {
|
||||||
|
border-right: 1px solid var(--border-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.tag-logic-option.active:first-child {
|
||||||
|
border-right: 1px solid rgba(255, 255, 255, 0.3);
|
||||||
|
}
|
||||||
|
|
||||||
/* Mobile adjustments */
|
/* Mobile adjustments */
|
||||||
@media (max-width: 768px) {
|
@media (max-width: 768px) {
|
||||||
.search-options-panel,
|
.search-options-panel,
|
||||||
|
|||||||
@@ -27,6 +27,18 @@
|
|||||||
@import 'components/lora-modal/showcase.css';
|
@import 'components/lora-modal/showcase.css';
|
||||||
@import 'components/lora-modal/triggerwords.css';
|
@import 'components/lora-modal/triggerwords.css';
|
||||||
@import 'components/lora-modal/versions.css';
|
@import 'components/lora-modal/versions.css';
|
||||||
|
|
||||||
|
/* New Model Modal Split-View Design (Phase 1) */
|
||||||
|
@import 'components/model-modal/overlay.css';
|
||||||
|
@import 'components/model-modal/showcase.css';
|
||||||
|
@import 'components/model-modal/thumbnail-rail.css';
|
||||||
|
@import 'components/model-modal/metadata.css';
|
||||||
|
@import 'components/model-modal/tabs.css';
|
||||||
|
|
||||||
|
/* Model Modal Phase 2 - Tabs and Upload */
|
||||||
|
@import 'components/model-modal/versions.css';
|
||||||
|
@import 'components/model-modal/recipes.css';
|
||||||
|
@import 'components/model-modal/upload.css';
|
||||||
@import 'components/shared/edit-metadata.css';
|
@import 'components/shared/edit-metadata.css';
|
||||||
@import 'components/search-filter.css';
|
@import 'components/search-filter.css';
|
||||||
@import 'components/bulk.css';
|
@import 'components/bulk.css';
|
||||||
|
|||||||
@@ -924,6 +924,11 @@ export class BaseModelApiClient {
|
|||||||
params.append('model_type', type);
|
params.append('model_type', type);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Add tag logic parameter (any = OR, all = AND)
|
||||||
|
if (pageState.filters.tagLogic) {
|
||||||
|
params.append('tag_logic', pageState.filters.tagLogic);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
this._addModelSpecificParams(params, pageState);
|
this._addModelSpecificParams(params, pageState);
|
||||||
|
|||||||
@@ -48,15 +48,18 @@ export class ModelDuplicatesManager {
|
|||||||
// Method to check for duplicates count using existing endpoint
|
// Method to check for duplicates count using existing endpoint
|
||||||
async checkDuplicatesCount() {
|
async checkDuplicatesCount() {
|
||||||
try {
|
try {
|
||||||
|
const params = this._buildFilterQueryParams();
|
||||||
const endpoint = `/api/lm/${this.modelType}/find-duplicates`;
|
const endpoint = `/api/lm/${this.modelType}/find-duplicates`;
|
||||||
const response = await fetch(endpoint);
|
const url = params.toString() ? `${endpoint}?${params}` : endpoint;
|
||||||
|
|
||||||
|
const response = await fetch(url);
|
||||||
|
|
||||||
if (!response.ok) {
|
if (!response.ok) {
|
||||||
throw new Error(`Failed to get duplicates count: ${response.statusText}`);
|
throw new Error(`Failed to get duplicates count: ${response.statusText}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
const data = await response.json();
|
const data = await response.json();
|
||||||
|
|
||||||
if (data.success) {
|
if (data.success) {
|
||||||
const duplicatesCount = (data.duplicates || []).length;
|
const duplicatesCount = (data.duplicates || []).length;
|
||||||
this.updateDuplicatesBadge(duplicatesCount);
|
this.updateDuplicatesBadge(duplicatesCount);
|
||||||
@@ -103,29 +106,34 @@ export class ModelDuplicatesManager {
|
|||||||
|
|
||||||
async findDuplicates() {
|
async findDuplicates() {
|
||||||
try {
|
try {
|
||||||
// Determine API endpoint based on model type
|
const params = this._buildFilterQueryParams();
|
||||||
const endpoint = `/api/lm/${this.modelType}/find-duplicates`;
|
const endpoint = `/api/lm/${this.modelType}/find-duplicates`;
|
||||||
|
const url = params.toString() ? `${endpoint}?${params}` : endpoint;
|
||||||
const response = await fetch(endpoint);
|
|
||||||
|
const response = await fetch(url);
|
||||||
if (!response.ok) {
|
if (!response.ok) {
|
||||||
throw new Error(`Failed to find duplicates: ${response.statusText}`);
|
throw new Error(`Failed to find duplicates: ${response.statusText}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
const data = await response.json();
|
const data = await response.json();
|
||||||
if (!data.success) {
|
if (!data.success) {
|
||||||
throw new Error(data.error || 'Unknown error finding duplicates');
|
throw new Error(data.error || 'Unknown error finding duplicates');
|
||||||
}
|
}
|
||||||
|
|
||||||
this.duplicateGroups = data.duplicates || [];
|
this.duplicateGroups = data.duplicates || [];
|
||||||
|
|
||||||
// Update the badge with the current count
|
// Update the badge with the current count
|
||||||
this.updateDuplicatesBadge(this.duplicateGroups.length);
|
this.updateDuplicatesBadge(this.duplicateGroups.length);
|
||||||
|
|
||||||
if (this.duplicateGroups.length === 0) {
|
if (this.duplicateGroups.length === 0) {
|
||||||
showToast('toast.duplicates.noDuplicatesFound', { type: this.modelType }, 'info');
|
showToast('toast.duplicates.noDuplicatesFound', { type: this.modelType }, 'info');
|
||||||
|
// If already in duplicate mode, exit to clear the display
|
||||||
|
if (this.inDuplicateMode) {
|
||||||
|
this.exitDuplicateMode();
|
||||||
|
}
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
this.enterDuplicateMode();
|
this.enterDuplicateMode();
|
||||||
return true;
|
return true;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
@@ -134,6 +142,51 @@ export class ModelDuplicatesManager {
|
|||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build query parameters from current filter state for duplicate finding.
|
||||||
|
* @returns {URLSearchParams} The query parameters to append to the API endpoint
|
||||||
|
*/
|
||||||
|
_buildFilterQueryParams() {
|
||||||
|
const params = new URLSearchParams();
|
||||||
|
const pageState = getCurrentPageState();
|
||||||
|
const filters = pageState?.filters;
|
||||||
|
|
||||||
|
if (!filters) return params;
|
||||||
|
|
||||||
|
// Base model filters
|
||||||
|
if (filters.baseModel && Array.isArray(filters.baseModel)) {
|
||||||
|
filters.baseModel.forEach(m => params.append('base_model', m));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Tag filters (tri-state: include/exclude)
|
||||||
|
if (filters.tags && typeof filters.tags === 'object') {
|
||||||
|
Object.entries(filters.tags).forEach(([tag, state]) => {
|
||||||
|
if (state === 'include') {
|
||||||
|
params.append('tag_include', tag);
|
||||||
|
} else if (state === 'exclude') {
|
||||||
|
params.append('tag_exclude', tag);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Model type filters
|
||||||
|
if (filters.modelTypes && Array.isArray(filters.modelTypes)) {
|
||||||
|
filters.modelTypes.forEach(t => params.append('model_type', t));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Folder filter (from active folder state)
|
||||||
|
if (pageState.activeFolder) {
|
||||||
|
params.append('folder', pageState.activeFolder);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Favorites filter
|
||||||
|
if (pageState.showFavoritesOnly) {
|
||||||
|
params.append('favorites_only', 'true');
|
||||||
|
}
|
||||||
|
|
||||||
|
return params;
|
||||||
|
}
|
||||||
|
|
||||||
enterDuplicateMode() {
|
enterDuplicateMode() {
|
||||||
this.inDuplicateMode = true;
|
this.inDuplicateMode = true;
|
||||||
|
|||||||
@@ -26,6 +26,7 @@ class RecipeCard {
|
|||||||
card.dataset.nsfwLevel = this.recipe.preview_nsfw_level || 0;
|
card.dataset.nsfwLevel = this.recipe.preview_nsfw_level || 0;
|
||||||
card.dataset.created = this.recipe.created_date;
|
card.dataset.created = this.recipe.created_date;
|
||||||
card.dataset.id = this.recipe.id || '';
|
card.dataset.id = this.recipe.id || '';
|
||||||
|
card.dataset.folder = this.recipe.folder || '';
|
||||||
|
|
||||||
// Get base model with fallback
|
// Get base model with fallback
|
||||||
const baseModelLabel = (this.recipe.base_model || '').trim() || 'Unknown';
|
const baseModelLabel = (this.recipe.base_model || '').trim() || 'Unknown';
|
||||||
|
|||||||
@@ -198,6 +198,12 @@ class InitializationManager {
|
|||||||
handleProgressUpdate(data) {
|
handleProgressUpdate(data) {
|
||||||
if (!data) return;
|
if (!data) return;
|
||||||
console.log('Received progress update:', data);
|
console.log('Received progress update:', data);
|
||||||
|
|
||||||
|
// Handle cache health warning messages
|
||||||
|
if (data.type === 'cache_health_warning') {
|
||||||
|
this.handleCacheHealthWarning(data);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
// Check if this update is for our page type
|
// Check if this update is for our page type
|
||||||
if (data.pageType && data.pageType !== this.pageType) {
|
if (data.pageType && data.pageType !== this.pageType) {
|
||||||
@@ -466,6 +472,29 @@ class InitializationManager {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle cache health warning messages from WebSocket
|
||||||
|
*/
|
||||||
|
handleCacheHealthWarning(data) {
|
||||||
|
console.log('Cache health warning received:', data);
|
||||||
|
|
||||||
|
// Import bannerService dynamically to avoid circular dependencies
|
||||||
|
import('../managers/BannerService.js').then(({ bannerService }) => {
|
||||||
|
// Initialize banner service if not already done
|
||||||
|
if (!bannerService.initialized) {
|
||||||
|
bannerService.initialize().then(() => {
|
||||||
|
bannerService.registerCacheHealthBanner(data);
|
||||||
|
}).catch(err => {
|
||||||
|
console.error('Failed to initialize banner service:', err);
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
bannerService.registerCacheHealthBanner(data);
|
||||||
|
}
|
||||||
|
}).catch(err => {
|
||||||
|
console.error('Failed to load banner service:', err);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Clean up resources when the component is destroyed
|
* Clean up resources when the component is destroyed
|
||||||
*/
|
*/
|
||||||
|
|||||||
871
static/js/components/model-modal/MetadataPanel.js
Normal file
871
static/js/components/model-modal/MetadataPanel.js
Normal file
@@ -0,0 +1,871 @@
|
|||||||
|
/**
|
||||||
|
* MetadataPanel - Right panel for model metadata and tabs
|
||||||
|
* Features:
|
||||||
|
* - Fixed header with model info
|
||||||
|
* - Compact metadata grid
|
||||||
|
* - Editable fields (usage tips, trigger words, notes)
|
||||||
|
* - Tabs with accordion content (Description, Versions, Recipes)
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { escapeHtml, formatFileSize } from '../shared/utils.js';
|
||||||
|
import { translate } from '../../utils/i18nHelpers.js';
|
||||||
|
import { showToast } from '../../utils/uiHelpers.js';
|
||||||
|
import { getModelApiClient } from '../../api/modelApiFactory.js';
|
||||||
|
import { VersionsTab } from './VersionsTab.js';
|
||||||
|
import { RecipesTab } from './RecipesTab.js';
|
||||||
|
|
||||||
|
export class MetadataPanel {
|
||||||
|
constructor(container) {
|
||||||
|
this.element = container;
|
||||||
|
this.model = null;
|
||||||
|
this.modelType = null;
|
||||||
|
this.activeTab = 'description';
|
||||||
|
this.versionsTab = null;
|
||||||
|
this.recipesTab = null;
|
||||||
|
this.notesDebounceTimer = null;
|
||||||
|
this.isEditingUsageTips = false;
|
||||||
|
this.isEditingTriggerWords = false;
|
||||||
|
this.editingTriggerWords = [];
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Render the metadata panel
|
||||||
|
*/
|
||||||
|
render({ model, modelType }) {
|
||||||
|
this.model = model;
|
||||||
|
this.modelType = modelType;
|
||||||
|
|
||||||
|
this.element.innerHTML = this.getTemplate();
|
||||||
|
this.bindEvents();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the HTML template
|
||||||
|
*/
|
||||||
|
getTemplate() {
|
||||||
|
const m = this.model;
|
||||||
|
const civitai = m.civitai || {};
|
||||||
|
const creator = civitai.creator || {};
|
||||||
|
|
||||||
|
return `
|
||||||
|
<div class="metadata__header">
|
||||||
|
<div class="metadata__title-row">
|
||||||
|
<h2 class="metadata__name">${escapeHtml(m.model_name || 'Unknown')}</h2>
|
||||||
|
<button class="metadata__edit-btn" data-action="edit-name" title="${translate('modals.model.actions.editModelName', {}, 'Edit model name')}">
|
||||||
|
<i class="fas fa-pencil-alt"></i>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="metadata__actions">
|
||||||
|
${creator.username ? `
|
||||||
|
<div class="metadata__creator" data-action="view-creator" data-username="${escapeHtml(creator.username)}">
|
||||||
|
${creator.image ? `
|
||||||
|
<div class="metadata__creator-avatar">
|
||||||
|
<img src="${creator.image}" alt="${escapeHtml(creator.username)}" onerror="this.style.display='none'; this.nextElementSibling.style.display='flex';">
|
||||||
|
<i class="fas fa-user" style="display: none;"></i>
|
||||||
|
</div>
|
||||||
|
` : `
|
||||||
|
<div class="metadata__creator-avatar">
|
||||||
|
<i class="fas fa-user"></i>
|
||||||
|
</div>
|
||||||
|
`}
|
||||||
|
<span class="metadata__creator-name">${escapeHtml(creator.username)}</span>
|
||||||
|
</div>
|
||||||
|
` : ''}
|
||||||
|
|
||||||
|
${m.from_civitai ? `
|
||||||
|
<a class="metadata__civitai-link" href="https://civitai.com/models/${civitai.modelId}" target="_blank" rel="noopener">
|
||||||
|
<i class="fas fa-globe"></i>
|
||||||
|
<span>${translate('modals.model.actions.viewOnCivitai', {}, 'Civitai')}</span>
|
||||||
|
</a>
|
||||||
|
` : ''}
|
||||||
|
|
||||||
|
${this.renderLicenseIcons()}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
${this.renderTags(m.tags)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="metadata__info">
|
||||||
|
<div class="metadata__info-grid">
|
||||||
|
<div class="metadata__info-item">
|
||||||
|
<span class="metadata__info-label">${translate('modals.model.metadata.version', {}, 'Version')}</span>
|
||||||
|
<span class="metadata__info-value">${escapeHtml(civitai.name || 'N/A')}</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="metadata__info-item">
|
||||||
|
<span class="metadata__info-label">${translate('modals.model.metadata.size', {}, 'Size')}</span>
|
||||||
|
<span class="metadata__info-value metadata__info-value--mono">${formatFileSize(m.file_size)}</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="metadata__info-item">
|
||||||
|
<span class="metadata__info-label">${translate('modals.model.metadata.baseModel', {}, 'Base Model')}</span>
|
||||||
|
<span class="metadata__info-value">${escapeHtml(m.base_model || translate('modals.model.metadata.unknown', {}, 'Unknown'))}</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="metadata__info-item">
|
||||||
|
<span class="metadata__info-label">${translate('modals.model.metadata.fileName', {}, 'File Name')}</span>
|
||||||
|
<span class="metadata__info-value metadata__info-value--mono">${escapeHtml(m.file_name || 'N/A')}</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="metadata__info-item metadata__info-item--full">
|
||||||
|
<span class="metadata__info-label">${translate('modals.model.metadata.location', {}, 'Location')}</span>
|
||||||
|
<span class="metadata__info-value metadata__info-value--path" data-action="open-location" title="${translate('modals.model.actions.openFileLocation', {}, 'Open file location')}">
|
||||||
|
${escapeHtml((m.file_path || '').replace(/[^/]+$/, '') || 'N/A')}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
${this.modelType === 'loras' ? this.renderLoraSpecific() : ''}
|
||||||
|
|
||||||
|
${this.renderNotes(m.notes)}
|
||||||
|
|
||||||
|
<div class="metadata__content">
|
||||||
|
${this.renderTabs()}
|
||||||
|
${this.renderTabPanels()}
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Render license icons
|
||||||
|
*/
|
||||||
|
renderLicenseIcons() {
|
||||||
|
const license = this.model.civitai?.model;
|
||||||
|
if (!license) return '';
|
||||||
|
|
||||||
|
const icons = [];
|
||||||
|
|
||||||
|
if (license.allowNoCredit === false) {
|
||||||
|
icons.push({ icon: 'user-check', title: translate('modals.model.license.creditRequired', {}, 'Creator credit required') });
|
||||||
|
}
|
||||||
|
|
||||||
|
if (license.allowCommercialUse) {
|
||||||
|
const restrictions = this.resolveCommercialRestrictions(license.allowCommercialUse);
|
||||||
|
restrictions.forEach(r => {
|
||||||
|
icons.push({ icon: r.icon, title: r.title });
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (license.allowDerivatives === false) {
|
||||||
|
icons.push({ icon: 'exchange-off', title: translate('modals.model.license.noDerivatives', {}, 'No sharing merges') });
|
||||||
|
}
|
||||||
|
|
||||||
|
if (license.allowDifferentLicense === false) {
|
||||||
|
icons.push({ icon: 'rotate-2', title: translate('modals.model.license.noReLicense', {}, 'Same permissions required') });
|
||||||
|
}
|
||||||
|
|
||||||
|
if (icons.length === 0) return '';
|
||||||
|
|
||||||
|
return `
|
||||||
|
<div class="metadata__licenses">
|
||||||
|
${icons.map(icon => `
|
||||||
|
<span class="metadata__license-icon"
|
||||||
|
style="--license-icon-image: url('/loras_static/images/tabler/${icon.icon}.svg')"
|
||||||
|
title="${escapeHtml(icon.title)}"
|
||||||
|
role="img"
|
||||||
|
aria-label="${escapeHtml(icon.title)}">
|
||||||
|
</span>
|
||||||
|
`).join('')}
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resolve commercial restrictions
|
||||||
|
*/
|
||||||
|
resolveCommercialRestrictions(value) {
|
||||||
|
const COMMERCIAL_CONFIG = [
|
||||||
|
{ key: 'image', icon: 'photo-off', title: translate('modals.model.license.noImageSell', {}, 'No selling generated content') },
|
||||||
|
{ key: 'rentcivit', icon: 'brush-off', title: translate('modals.model.license.noRentCivit', {}, 'No Civitai generation') },
|
||||||
|
{ key: 'rent', icon: 'world-off', title: translate('modals.model.license.noRent', {}, 'No generation services') },
|
||||||
|
{ key: 'sell', icon: 'shopping-cart-off', title: translate('modals.model.license.noSell', {}, 'No selling models') },
|
||||||
|
];
|
||||||
|
|
||||||
|
let allowed = new Set();
|
||||||
|
const values = Array.isArray(value) ? value : [value];
|
||||||
|
|
||||||
|
values.forEach(v => {
|
||||||
|
if (!v && v !== '') return;
|
||||||
|
const cleaned = String(v).trim().toLowerCase().replace(/[\s_-]+/g, '').replace(/[^a-z]/g, '');
|
||||||
|
if (cleaned) allowed.add(cleaned);
|
||||||
|
});
|
||||||
|
|
||||||
|
if (allowed.has('sell')) {
|
||||||
|
allowed.add('rent');
|
||||||
|
allowed.add('rentcivit');
|
||||||
|
allowed.add('image');
|
||||||
|
}
|
||||||
|
if (allowed.has('rent')) {
|
||||||
|
allowed.add('rentcivit');
|
||||||
|
}
|
||||||
|
|
||||||
|
return COMMERCIAL_CONFIG.filter(config => !allowed.has(config.key));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Render tags
|
||||||
|
*/
|
||||||
|
renderTags(tags) {
|
||||||
|
if (!tags || tags.length === 0) return '';
|
||||||
|
|
||||||
|
const visibleTags = tags.slice(0, 8);
|
||||||
|
const remaining = tags.length - visibleTags.length;
|
||||||
|
|
||||||
|
return `
|
||||||
|
<div class="metadata__tags">
|
||||||
|
${visibleTags.map(tag => `
|
||||||
|
<span class="metadata__tag">${escapeHtml(tag)}</span>
|
||||||
|
`).join('')}
|
||||||
|
${remaining > 0 ? `<span class="metadata__tag">+${remaining}</span>` : ''}
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Render LoRA specific sections with editing
|
||||||
|
*/
|
||||||
|
renderLoraSpecific() {
|
||||||
|
const m = this.model;
|
||||||
|
const usageTips = m.usage_tips ? JSON.parse(m.usage_tips) : {};
|
||||||
|
const triggerWords = this.isEditingTriggerWords
|
||||||
|
? this.editingTriggerWords
|
||||||
|
: (m.civitai?.trainedWords || []);
|
||||||
|
|
||||||
|
return `
|
||||||
|
<div class="metadata__section">
|
||||||
|
<div class="metadata__section-header">
|
||||||
|
<span class="metadata__section-title">${translate('modals.model.metadata.usageTips', {}, 'Usage Tips')}</span>
|
||||||
|
${!this.isEditingUsageTips ? `
|
||||||
|
<button class="metadata__section-edit" data-action="edit-usage-tips" title="${translate('modals.model.usageTips.add', {}, 'Add usage tip')}">
|
||||||
|
<i class="fas fa-plus"></i>
|
||||||
|
</button>
|
||||||
|
` : ''}
|
||||||
|
</div>
|
||||||
|
<div class="metadata__tags--editable">
|
||||||
|
${Object.entries(usageTips).map(([key, value]) => `
|
||||||
|
<span class="metadata__tag metadata__tag--editable" data-key="${escapeHtml(key)}" data-action="remove-usage-tip" title="${translate('common.actions.delete', {}, 'Delete')}">
|
||||||
|
${escapeHtml(key)}: ${escapeHtml(String(value))}
|
||||||
|
</span>
|
||||||
|
`).join('')}
|
||||||
|
${this.isEditingUsageTips ? this.renderUsageTipEditor() : ''}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="metadata__section">
|
||||||
|
<div class="metadata__section-header">
|
||||||
|
<span class="metadata__section-title">${translate('modals.model.triggerWords.label', {}, 'Trigger Words')}</span>
|
||||||
|
<div class="metadata__section-actions">
|
||||||
|
${!this.isEditingTriggerWords ? `
|
||||||
|
<button class="metadata__section-edit" data-action="copy-trigger-words" title="${translate('modals.model.triggerWords.copyWord', {}, 'Copy all trigger words')}">
|
||||||
|
<i class="fas fa-copy"></i>
|
||||||
|
</button>
|
||||||
|
<button class="metadata__section-edit" data-action="edit-trigger-words" title="${translate('modals.model.triggerWords.edit', {}, 'Edit trigger words')}">
|
||||||
|
<i class="fas fa-pencil-alt"></i>
|
||||||
|
</button>
|
||||||
|
` : `
|
||||||
|
<button class="metadata__section-edit" data-action="cancel-trigger-words" title="${translate('common.actions.cancel', {}, 'Cancel')}">
|
||||||
|
<i class="fas fa-times"></i>
|
||||||
|
</button>
|
||||||
|
<button class="metadata__section-edit metadata__section-edit--primary" data-action="save-trigger-words" title="${translate('common.actions.save', {}, 'Save')}">
|
||||||
|
<i class="fas fa-check"></i>
|
||||||
|
</button>
|
||||||
|
`}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="metadata__tags--editable">
|
||||||
|
${triggerWords.map(word => `
|
||||||
|
<span class="metadata__tag ${this.isEditingTriggerWords ? 'metadata__tag--removable' : 'metadata__tag--editable'}"
|
||||||
|
data-word="${escapeHtml(word)}"
|
||||||
|
${this.isEditingTriggerWords ? 'data-action="remove-trigger-word"' : 'data-action="copy-trigger-word"'}
|
||||||
|
title="${this.isEditingTriggerWords ? translate('common.actions.delete', {}, 'Delete') : translate('modals.model.triggerWords.copyWord', {}, 'Copy trigger word')}">
|
||||||
|
${escapeHtml(word)}
|
||||||
|
${this.isEditingTriggerWords ? '<i class="fas fa-times"></i>' : ''}
|
||||||
|
</span>
|
||||||
|
`).join('')}
|
||||||
|
${this.isEditingTriggerWords ? `
|
||||||
|
<input type="text"
|
||||||
|
class="metadata__tag-input"
|
||||||
|
placeholder="${translate('modals.model.triggerWords.addPlaceholder', {}, 'Type to add...')}"
|
||||||
|
data-action="add-trigger-word-input"
|
||||||
|
autofocus>
|
||||||
|
` : triggerWords.length === 0 ? `
|
||||||
|
<span class="metadata__tag metadata__tag--placeholder">${translate('modals.model.triggerWords.noTriggerWordsNeeded', {}, 'No trigger words needed')}</span>
|
||||||
|
` : ''}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Render usage tip editor
|
||||||
|
*/
|
||||||
|
renderUsageTipEditor() {
|
||||||
|
return `
|
||||||
|
<div class="usage-tip-editor">
|
||||||
|
<select class="usage-tip-key" data-action="usage-tip-key-change">
|
||||||
|
<option value="">${translate('modals.model.usageTips.addPresetParameter', {}, 'Select parameter...')}</option>
|
||||||
|
<option value="strength">${translate('modals.model.usageTips.strength', {}, 'Strength')}</option>
|
||||||
|
<option value="strength_min">${translate('modals.model.usageTips.strengthMin', {}, 'Strength Min')}</option>
|
||||||
|
<option value="strength_max">${translate('modals.model.usageTips.strengthMax', {}, 'Strength Max')}</option>
|
||||||
|
<option value="clip_strength">${translate('modals.model.usageTips.clipStrength', {}, 'Clip Strength')}</option>
|
||||||
|
<option value="clip_skip">${translate('modals.model.usageTips.clipSkip', {}, 'Clip Skip')}</option>
|
||||||
|
</select>
|
||||||
|
<input type="text"
|
||||||
|
class="usage-tip-value"
|
||||||
|
placeholder="${translate('modals.model.usageTips.valuePlaceholder', {}, 'Value')}"
|
||||||
|
data-action="usage-tip-value-input">
|
||||||
|
<button class="usage-tip-add" data-action="add-usage-tip">
|
||||||
|
<i class="fas fa-check"></i>
|
||||||
|
</button>
|
||||||
|
<button class="usage-tip-cancel" data-action="cancel-usage-tips">
|
||||||
|
<i class="fas fa-times"></i>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Render notes section
|
||||||
|
*/
|
||||||
|
renderNotes(notes) {
|
||||||
|
return `
|
||||||
|
<div class="metadata__section metadata__section--notes">
|
||||||
|
<div class="metadata__section-header">
|
||||||
|
<span class="metadata__section-title">${translate('modals.model.metadata.additionalNotes', {}, 'Notes')}</span>
|
||||||
|
<span class="metadata__save-indicator" data-save-indicator style="display: none;">
|
||||||
|
<i class="fas fa-check"></i> Saved
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
<textarea class="metadata__notes"
|
||||||
|
placeholder="${translate('modals.model.metadata.addNotesPlaceholder', {}, 'Add your notes here...')}"
|
||||||
|
data-action="notes-input">${escapeHtml(notes || '')}</textarea>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Render tabs
|
||||||
|
*/
|
||||||
|
renderTabs() {
|
||||||
|
const tabs = [
|
||||||
|
{ id: 'description', label: translate('modals.model.tabs.description', {}, 'Description') },
|
||||||
|
{ id: 'versions', label: translate('modals.model.tabs.versions', {}, 'Versions') },
|
||||||
|
];
|
||||||
|
|
||||||
|
if (this.modelType === 'loras') {
|
||||||
|
tabs.push({ id: 'recipes', label: translate('modals.model.tabs.recipes', {}, 'Recipes') });
|
||||||
|
}
|
||||||
|
|
||||||
|
return `
|
||||||
|
<div class="tabs">
|
||||||
|
${tabs.map(tab => `
|
||||||
|
<button class="tab ${tab.id === this.activeTab ? 'active' : ''}"
|
||||||
|
data-tab="${tab.id}"
|
||||||
|
data-action="switch-tab">
|
||||||
|
<span class="tab__label">${tab.label}</span>
|
||||||
|
${tab.id === 'versions' && this.model.update_available ? `
|
||||||
|
<span class="tab__badge tab__badge--pulse">${translate('modals.model.tabs.update', {}, 'Update')}</span>
|
||||||
|
` : ''}
|
||||||
|
</button>
|
||||||
|
`).join('')}
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Render the content panels behind the tabs.
 *
 * The description panel holds two accordions: the version description
 * (expanded by default) and the model description (collapsed). The
 * versions/recipes panels are empty containers filled lazily by
 * loadVersionsTab()/loadRecipesTab(); the recipes panel exists only for
 * LoRA models.
 *
 * NOTE(review): civitai.description and civitai.model.description are
 * injected as raw HTML (unlike the escaped notes) — presumably sanitized
 * upstream; verify before trusting arbitrary remote metadata.
 * @returns {string} HTML for all tab panels.
 */
renderTabPanels() {
    const civitai = this.model.civitai || {};

    return `
        <div class="tab-panels">
            <div class="tab-panel ${this.activeTab === 'description' ? 'active' : ''}" data-panel="description">
                <div class="accordion expanded">
                    <div class="accordion__header" data-action="toggle-accordion">
                        <span class="accordion__title">${translate('modals.model.metadata.aboutThisVersion', {}, 'About this version')}</span>
                        <i class="accordion__icon fas fa-chevron-down"></i>
                    </div>
                    <div class="accordion__content">
                        <div class="accordion__body">
                            ${civitai.description ? `
                                <div class="markdown-content">${civitai.description}</div>
                            ` : `
                                <p class="text-muted">${translate('modals.model.description.noDescription', {}, 'No description available')}</p>
                            `}
                        </div>
                    </div>
                </div>

                <div class="accordion">
                    <div class="accordion__header" data-action="toggle-accordion">
                        <span class="accordion__title">${translate('modals.model.accordion.modelDescription', {}, 'Model Description')}</span>
                        <i class="accordion__icon fas fa-chevron-down"></i>
                    </div>
                    <div class="accordion__content">
                        <div class="accordion__body">
                            ${civitai.model?.description ? `
                                <div class="markdown-content">${civitai.model.description}</div>
                            ` : `
                                <p class="text-muted">${translate('modals.model.description.noDescription', {}, 'No description available')}</p>
                            `}
                        </div>
                    </div>
                </div>
            </div>

            <div class="tab-panel ${this.activeTab === 'versions' ? 'active' : ''}" data-panel="versions">
                <div class="versions-tab-container"></div>
            </div>

            ${this.modelType === 'loras' ? `
                <div class="tab-panel ${this.activeTab === 'recipes' ? 'active' : ''}" data-panel="recipes">
                    <div class="recipes-tab-container"></div>
                </div>
            ` : ''}
        </div>
    `;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Bind event listeners
|
||||||
|
*/
|
||||||
|
bindEvents() {
|
||||||
|
this.element.addEventListener('click', (e) => {
|
||||||
|
const target = e.target.closest('[data-action]');
|
||||||
|
if (!target) return;
|
||||||
|
|
||||||
|
const action = target.dataset.action;
|
||||||
|
|
||||||
|
switch (action) {
|
||||||
|
case 'switch-tab':
|
||||||
|
const tabId = target.dataset.tab;
|
||||||
|
this.switchTab(tabId);
|
||||||
|
break;
|
||||||
|
case 'toggle-accordion':
|
||||||
|
target.closest('.accordion')?.classList.toggle('expanded');
|
||||||
|
break;
|
||||||
|
case 'open-location':
|
||||||
|
this.openFileLocation();
|
||||||
|
break;
|
||||||
|
case 'view-creator':
|
||||||
|
const username = target.dataset.username || target.closest('[data-username]')?.dataset.username;
|
||||||
|
if (username) {
|
||||||
|
window.open(`https://civitai.com/user/${username}`, '_blank');
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
case 'edit-name':
|
||||||
|
this.editModelName();
|
||||||
|
break;
|
||||||
|
case 'edit-usage-tips':
|
||||||
|
this.startEditingUsageTips();
|
||||||
|
break;
|
||||||
|
case 'cancel-usage-tips':
|
||||||
|
this.cancelEditingUsageTips();
|
||||||
|
break;
|
||||||
|
case 'add-usage-tip':
|
||||||
|
this.addUsageTip();
|
||||||
|
break;
|
||||||
|
case 'remove-usage-tip':
|
||||||
|
const key = target.dataset.key;
|
||||||
|
if (key) this.removeUsageTip(key);
|
||||||
|
break;
|
||||||
|
case 'edit-trigger-words':
|
||||||
|
this.startEditingTriggerWords();
|
||||||
|
break;
|
||||||
|
case 'cancel-trigger-words':
|
||||||
|
this.cancelEditingTriggerWords();
|
||||||
|
break;
|
||||||
|
case 'save-trigger-words':
|
||||||
|
this.saveTriggerWords();
|
||||||
|
break;
|
||||||
|
case 'copy-trigger-words':
|
||||||
|
this.copyAllTriggerWords();
|
||||||
|
break;
|
||||||
|
case 'copy-trigger-word':
|
||||||
|
const word = target.dataset.word;
|
||||||
|
if (word) this.copyTriggerWord(word);
|
||||||
|
break;
|
||||||
|
case 'remove-trigger-word':
|
||||||
|
const wordToRemove = target.dataset.word || target.closest('[data-word]')?.dataset.word;
|
||||||
|
if (wordToRemove) this.removeTriggerWord(wordToRemove);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Handle input events
|
||||||
|
this.element.addEventListener('input', (e) => {
|
||||||
|
if (e.target.dataset.action === 'notes-input') {
|
||||||
|
this.handleNotesInput(e.target.value);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
this.element.addEventListener('keydown', (e) => {
|
||||||
|
if (e.target.dataset.action === 'add-trigger-word-input' && e.key === 'Enter') {
|
||||||
|
e.preventDefault();
|
||||||
|
const value = e.target.value.trim();
|
||||||
|
if (value) {
|
||||||
|
this.addTriggerWord(value);
|
||||||
|
e.target.value = '';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (e.target.dataset.action === 'usage-tip-value-input' && e.key === 'Enter') {
|
||||||
|
e.preventDefault();
|
||||||
|
this.addUsageTip();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Load initial tab content
|
||||||
|
if (this.activeTab === 'versions') {
|
||||||
|
this.loadVersionsTab();
|
||||||
|
} else if (this.activeTab === 'recipes') {
|
||||||
|
this.loadRecipesTab();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Switch active tab
|
||||||
|
*/
|
||||||
|
switchTab(tabId) {
|
||||||
|
this.activeTab = tabId;
|
||||||
|
|
||||||
|
// Update tab buttons
|
||||||
|
this.element.querySelectorAll('.tab').forEach(tab => {
|
||||||
|
tab.classList.toggle('active', tab.dataset.tab === tabId);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Update panels
|
||||||
|
this.element.querySelectorAll('.tab-panel').forEach(panel => {
|
||||||
|
panel.classList.toggle('active', panel.dataset.panel === tabId);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Load tab-specific data
|
||||||
|
if (tabId === 'versions') {
|
||||||
|
this.loadVersionsTab();
|
||||||
|
} else if (tabId === 'recipes') {
|
||||||
|
this.loadRecipesTab();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Load versions tab
|
||||||
|
*/
|
||||||
|
loadVersionsTab() {
|
||||||
|
if (!this.versionsTab) {
|
||||||
|
const container = this.element.querySelector('.versions-tab-container');
|
||||||
|
if (container) {
|
||||||
|
this.versionsTab = new VersionsTab(container);
|
||||||
|
this.versionsTab.render({ model: this.model, modelType: this.modelType });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Load recipes tab
|
||||||
|
*/
|
||||||
|
loadRecipesTab() {
|
||||||
|
if (!this.recipesTab) {
|
||||||
|
const container = this.element.querySelector('.recipes-tab-container');
|
||||||
|
if (container) {
|
||||||
|
this.recipesTab = new RecipesTab(container);
|
||||||
|
this.recipesTab.render({ model: this.model });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle notes input with auto-save
|
||||||
|
*/
|
||||||
|
handleNotesInput(value) {
|
||||||
|
// Clear existing timer
|
||||||
|
if (this.notesDebounceTimer) {
|
||||||
|
clearTimeout(this.notesDebounceTimer);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Show saving indicator
|
||||||
|
const indicator = this.element.querySelector('[data-save-indicator]');
|
||||||
|
if (indicator) {
|
||||||
|
indicator.innerHTML = '<i class="fas fa-spinner fa-spin"></i> Saving...';
|
||||||
|
indicator.style.display = 'inline-flex';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Debounce save
|
||||||
|
this.notesDebounceTimer = setTimeout(() => {
|
||||||
|
this.saveNotes(value);
|
||||||
|
}, 800);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Save notes to server
|
||||||
|
*/
|
||||||
|
async saveNotes(notes) {
|
||||||
|
if (!this.model?.file_path) return;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const client = getModelApiClient(this.modelType);
|
||||||
|
await client.saveModelMetadata(this.model.file_path, { notes });
|
||||||
|
|
||||||
|
const indicator = this.element.querySelector('[data-save-indicator]');
|
||||||
|
if (indicator) {
|
||||||
|
indicator.innerHTML = '<i class="fas fa-check"></i> Saved';
|
||||||
|
setTimeout(() => {
|
||||||
|
indicator.style.display = 'none';
|
||||||
|
}, 2000);
|
||||||
|
}
|
||||||
|
|
||||||
|
showToast('modals.model.notes.saved', {}, 'success');
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to save notes:', err);
|
||||||
|
|
||||||
|
const indicator = this.element.querySelector('[data-save-indicator]');
|
||||||
|
if (indicator) {
|
||||||
|
indicator.innerHTML = '<i class="fas fa-exclamation-triangle"></i> Failed';
|
||||||
|
}
|
||||||
|
|
||||||
|
showToast('modals.model.notes.saveFailed', {}, 'error');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Enter usage-tips edit mode and re-render the LoRA-specific sections.
 */
startEditingUsageTips() {
    this.isEditingUsageTips = true;
    this.refreshLoraSpecificSection();
}

/**
 * Leave usage-tips edit mode without saving and re-render.
 */
cancelEditingUsageTips() {
    this.isEditingUsageTips = false;
    this.refreshLoraSpecificSection();
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add usage tip
|
||||||
|
*/
|
||||||
|
async addUsageTip() {
|
||||||
|
const keySelect = this.element.querySelector('.usage-tip-key');
|
||||||
|
const valueInput = this.element.querySelector('.usage-tip-value');
|
||||||
|
|
||||||
|
const key = keySelect?.value;
|
||||||
|
const value = valueInput?.value.trim();
|
||||||
|
|
||||||
|
if (!key || !value) return;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const usageTips = this.model.usage_tips ? JSON.parse(this.model.usage_tips) : {};
|
||||||
|
usageTips[key] = value;
|
||||||
|
|
||||||
|
const client = getModelApiClient(this.modelType);
|
||||||
|
await client.saveModelMetadata(this.model.file_path, { usage_tips: JSON.stringify(usageTips) });
|
||||||
|
|
||||||
|
this.model.usage_tips = JSON.stringify(usageTips);
|
||||||
|
this.isEditingUsageTips = false;
|
||||||
|
this.refreshLoraSpecificSection();
|
||||||
|
showToast('common.actions.save', {}, 'success');
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to save usage tip:', err);
|
||||||
|
showToast('modals.model.notes.saveFailed', {}, 'error');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove usage tip
|
||||||
|
*/
|
||||||
|
async removeUsageTip(key) {
|
||||||
|
try {
|
||||||
|
const usageTips = this.model.usage_tips ? JSON.parse(this.model.usage_tips) : {};
|
||||||
|
delete usageTips[key];
|
||||||
|
|
||||||
|
const client = getModelApiClient(this.modelType);
|
||||||
|
await client.saveModelMetadata(this.model.file_path, {
|
||||||
|
usage_tips: Object.keys(usageTips).length > 0 ? JSON.stringify(usageTips) : null
|
||||||
|
});
|
||||||
|
|
||||||
|
this.model.usage_tips = Object.keys(usageTips).length > 0 ? JSON.stringify(usageTips) : null;
|
||||||
|
this.refreshLoraSpecificSection();
|
||||||
|
showToast('common.actions.delete', {}, 'success');
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to remove usage tip:', err);
|
||||||
|
showToast('modals.model.notes.saveFailed', {}, 'error');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start editing trigger words
|
||||||
|
*/
|
||||||
|
startEditingTriggerWords() {
|
||||||
|
this.isEditingTriggerWords = true;
|
||||||
|
this.editingTriggerWords = [...(this.model.civitai?.trainedWords || [])];
|
||||||
|
this.refreshLoraSpecificSection();
|
||||||
|
|
||||||
|
// Focus input
|
||||||
|
setTimeout(() => {
|
||||||
|
const input = this.element.querySelector('.metadata__tag-input');
|
||||||
|
if (input) input.focus();
|
||||||
|
}, 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Cancel editing trigger words
|
||||||
|
*/
|
||||||
|
cancelEditingTriggerWords() {
|
||||||
|
this.isEditingTriggerWords = false;
|
||||||
|
this.editingTriggerWords = [];
|
||||||
|
this.refreshLoraSpecificSection();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add trigger word during editing
|
||||||
|
*/
|
||||||
|
addTriggerWord(word) {
|
||||||
|
if (!word.trim()) return;
|
||||||
|
if (this.editingTriggerWords.includes(word.trim())) {
|
||||||
|
showToast('modals.model.triggerWords.validation.duplicate', {}, 'warning');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
this.editingTriggerWords.push(word.trim());
|
||||||
|
this.refreshLoraSpecificSection();
|
||||||
|
|
||||||
|
// Focus input again
|
||||||
|
setTimeout(() => {
|
||||||
|
const input = this.element.querySelector('.metadata__tag-input');
|
||||||
|
if (input) {
|
||||||
|
input.value = '';
|
||||||
|
input.focus();
|
||||||
|
}
|
||||||
|
}, 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove trigger word during editing
|
||||||
|
*/
|
||||||
|
removeTriggerWord(word) {
|
||||||
|
this.editingTriggerWords = this.editingTriggerWords.filter(w => w !== word);
|
||||||
|
this.refreshLoraSpecificSection();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Save trigger words
|
||||||
|
*/
|
||||||
|
async saveTriggerWords() {
|
||||||
|
try {
|
||||||
|
const client = getModelApiClient(this.modelType);
|
||||||
|
await client.saveModelMetadata(this.model.file_path, {
|
||||||
|
trained_words: this.editingTriggerWords
|
||||||
|
});
|
||||||
|
|
||||||
|
// Update local model data
|
||||||
|
if (!this.model.civitai) this.model.civitai = {};
|
||||||
|
this.model.civitai.trainedWords = [...this.editingTriggerWords];
|
||||||
|
|
||||||
|
this.isEditingTriggerWords = false;
|
||||||
|
this.editingTriggerWords = [];
|
||||||
|
this.refreshLoraSpecificSection();
|
||||||
|
showToast('common.actions.save', {}, 'success');
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to save trigger words:', err);
|
||||||
|
showToast('modals.model.notes.saveFailed', {}, 'error');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Copy single trigger word
|
||||||
|
*/
|
||||||
|
async copyTriggerWord(word) {
|
||||||
|
try {
|
||||||
|
await navigator.clipboard.writeText(word);
|
||||||
|
showToast('modals.model.triggerWords.copyWord', {}, 'success');
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to copy trigger word:', err);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Copy all trigger words
|
||||||
|
*/
|
||||||
|
async copyAllTriggerWords() {
|
||||||
|
const words = this.model.civitai?.trainedWords || [];
|
||||||
|
if (words.length === 0) return;
|
||||||
|
|
||||||
|
try {
|
||||||
|
await navigator.clipboard.writeText(words.join(', '));
|
||||||
|
showToast('modals.model.triggerWords.copyWord', {}, 'success');
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to copy trigger words:', err);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Re-render the two LoRA-only metadata sections (usage tips and trigger
 * words) in place, without rebuilding the rest of the panel.
 *
 * Relies on a positional convention: the first two `.metadata__section`
 * elements in the panel are assumed to be usage tips and trigger words,
 * in that order — keep renderLoraSpecific() (defined elsewhere in this
 * class) in sync with that ordering.
 */
refreshLoraSpecificSection() {
    if (this.modelType !== 'loras') return;

    const sections = this.element.querySelectorAll('.metadata__section');
    // First two sections are usage tips and trigger words
    if (sections.length >= 2) {
        const newHtml = this.renderLoraSpecific();
        // Parse the fresh markup in a detached container so the two new
        // sections can be swapped in atomically.
        const tempDiv = document.createElement('div');
        tempDiv.innerHTML = newHtml;

        const newSections = tempDiv.querySelectorAll('.metadata__section');
        if (newSections.length >= 2) {
            sections[0].replaceWith(newSections[0]);
            sections[1].replaceWith(newSections[1]);
        }
    }
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Edit model name
|
||||||
|
*/
|
||||||
|
async editModelName() {
|
||||||
|
const currentName = this.model.model_name || '';
|
||||||
|
const newName = prompt(
|
||||||
|
translate('modals.model.actions.editModelName', {}, 'Edit model name'),
|
||||||
|
currentName
|
||||||
|
);
|
||||||
|
|
||||||
|
if (newName !== null && newName.trim() !== '' && newName !== currentName) {
|
||||||
|
try {
|
||||||
|
const client = getModelApiClient(this.modelType);
|
||||||
|
await client.saveModelMetadata(this.model.file_path, { model_name: newName.trim() });
|
||||||
|
|
||||||
|
this.model.model_name = newName.trim();
|
||||||
|
this.element.querySelector('.metadata__name').textContent = newName.trim();
|
||||||
|
showToast('common.actions.save', {}, 'success');
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to save model name:', err);
|
||||||
|
showToast('modals.model.notes.saveFailed', {}, 'error');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Open file location
|
||||||
|
*/
|
||||||
|
async openFileLocation() {
|
||||||
|
if (!this.model?.file_path) return;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch('/api/lm/open-file-location', {
|
||||||
|
method: 'POST',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: JSON.stringify({ file_path: this.model.file_path })
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) throw new Error('Failed to open file location');
|
||||||
|
|
||||||
|
showToast('modals.model.openFileLocation.success', {}, 'success');
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to open file location:', err);
|
||||||
|
showToast('modals.model.openFileLocation.failed', {}, 'error');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
374
static/js/components/model-modal/ModelModal.js
Normal file
374
static/js/components/model-modal/ModelModal.js
Normal file
@@ -0,0 +1,374 @@
|
|||||||
|
/**
|
||||||
|
* ModelModal - Main Controller for Split-View Overlay
|
||||||
|
*
|
||||||
|
* Architecture:
|
||||||
|
* - Overlay container (split-view grid)
|
||||||
|
* - Left: Showcase (ExampleShowcase component)
|
||||||
|
* - Right: Metadata + Tabs (MetadataPanel component)
|
||||||
|
* - Global keyboard navigation (↑↓ for model, ←→ for examples)
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { Showcase } from './Showcase.js';
|
||||||
|
import { MetadataPanel } from './MetadataPanel.js';
|
||||||
|
import { getModelApiClient } from '../../api/modelApiFactory.js';
|
||||||
|
import { state } from '../../state/index.js';
|
||||||
|
import { translate } from '../../utils/i18nHelpers.js';
|
||||||
|
|
||||||
|
export class ModelModal {
    static instance = null;
    static overlayElement = null;   // root .model-overlay node; null while closed
    static currentModel = null;     // model record currently displayed
    static currentModelType = null; // 'loras' | 'checkpoints' | 'embeddings'
    static showcase = null;         // Showcase component instance
    static metadataPanel = null;    // MetadataPanel component instance
    static isNavigating = false;    // guards against overlapping prev/next navigation
    static keyboardHandler = null;  // document keydown handler, kept so it can be removed
    static hasShownHint = false;    // keyboard hint shown only once per page load

    /**
     * Fetch complete civitai metadata for a model and return a copy with
     * `civitai` replaced by the full data. Falls back to whatever is
     * already on the model when the fetch fails. Shared by show() and
     * transitionToModel(), which previously duplicated this logic.
     * @param {Object} model - Model record (may have partial civitai data).
     * @param {string} modelType - Forwarded to the API client factory, for
     *   consistency with MetadataPanel's getModelApiClient(modelType) usage.
     * @returns {Promise<Object>} Copy of model with complete civitai data.
     */
    static async _fetchCompleteModel(model, modelType) {
        let completeCivitaiData = model.civitai || {};
        if (model.file_path) {
            try {
                const fullMetadata = await getModelApiClient(modelType).fetchModelMetadata(model.file_path);
                completeCivitaiData = fullMetadata || model.civitai || {};
            } catch (error) {
                // Best effort: render with whatever metadata we already have.
                console.warn('Failed to fetch complete metadata:', error);
            }
        }
        return { ...model, civitai: completeCivitaiData };
    }

    /**
     * Show the model modal with split-view overlay.
     * @param {Object} model - Model data object
     * @param {string} modelType - Type of model ('loras', 'checkpoints', 'embeddings')
     */
    static async show(model, modelType) {
        // If already open, animate transition to the new model instead of rebuilding.
        if (this.isOpen()) {
            await this.transitionToModel(model, modelType);
            return;
        }

        this.currentModelType = modelType;
        this.isNavigating = false;

        // Fetch complete metadata before the first render.
        this.currentModel = await this._fetchCompleteModel(model, modelType);

        // Create overlay
        this.createOverlay();

        // Initialize components
        this.showcase = new Showcase(this.overlayElement.querySelector('.showcase'));
        this.metadataPanel = new MetadataPanel(this.overlayElement.querySelector('.metadata'));

        // Render content
        await this.render();

        // Setup keyboard navigation
        this.setupKeyboardNavigation();

        // Lock body scroll
        document.body.classList.add('modal-open');

        // Show hint on first use
        if (!this.hasShownHint) {
            this.showKeyboardHint();
            this.hasShownHint = true;
        }
    }

    /**
     * Create the overlay DOM structure, attach close handlers, and append
     * it to the document body.
     */
    static createOverlay() {
        // Check sidebar state for layout adjustment
        const sidebar = document.querySelector('.folder-sidebar');
        const isSidebarCollapsed = sidebar?.classList.contains('collapsed');

        this.overlayElement = document.createElement('div');
        this.overlayElement.className = `model-overlay ${isSidebarCollapsed ? 'sidebar-collapsed' : ''}`;
        this.overlayElement.id = 'modelModal';
        this.overlayElement.innerHTML = `
            <button class="model-overlay__close" title="${translate('common.close', {}, 'Close')}">
                <i class="fas fa-times"></i>
            </button>
            <div class="model-overlay__hint">
                ↑↓ ${translate('modals.model.navigation.switchModel', {}, 'Switch model')} |
                ←→ ${translate('modals.model.navigation.browseExamples', {}, 'Browse examples')} |
                ESC ${translate('common.close', {}, 'Close')}
            </div>
            <div class="showcase"></div>
            <div class="metadata"></div>
        `;

        // Close button handler
        this.overlayElement.querySelector('.model-overlay__close').addEventListener('click', () => {
            this.close();
        });

        // Click on the backdrop itself (not children) closes the modal.
        this.overlayElement.addEventListener('click', (e) => {
            if (e.target === this.overlayElement) {
                this.close();
            }
        });

        document.body.appendChild(this.overlayElement);
    }

    /**
     * Render the current model into the showcase and metadata components.
     */
    static async render() {
        if (!this.currentModel) return;

        // Combine regular civitai examples with user-added custom images.
        const regularImages = this.currentModel.civitai?.images || [];
        const customImages = this.currentModel.civitai?.customImages || [];
        const allImages = [...regularImages, ...customImages];

        // Render showcase
        this.showcase.render({
            images: allImages,
            modelHash: this.currentModel.sha256,
            filePath: this.currentModel.file_path
        });

        // Render metadata panel
        this.metadataPanel.render({
            model: this.currentModel,
            modelType: this.currentModelType
        });
    }

    /**
     * Transition to a different model with a fade animation, reusing the
     * existing overlay and component instances.
     * @param {Object} model
     * @param {string} modelType
     */
    static async transitionToModel(model, modelType) {
        // Ensure components are initialized
        if (!this.showcase || !this.metadataPanel) {
            console.warn('Showcase or MetadataPanel not initialized, falling back to show()');
            await this.show(model, modelType);
            return;
        }

        // Fade out current content (150 ms matches the CSS transition).
        this.showcase?.element?.classList.add('transitioning');
        this.metadataPanel?.element?.classList.add('transitioning');

        await new Promise(resolve => setTimeout(resolve, 150));

        // Fetch complete metadata for the new model and swap state in-place.
        this.currentModel = await this._fetchCompleteModel(model, modelType);
        this.currentModelType = modelType;

        // Render new content in-place
        await this.render();

        // Fade in new content
        this.showcase?.element?.classList.remove('transitioning');
        this.metadataPanel?.element?.classList.remove('transitioning');
    }

    /**
     * Close the modal.
     * @param {boolean} [animate=true] - Play the closing animation first.
     */
    static close(animate = true) {
        if (!this.overlayElement) return;

        // Cleanup keyboard handler
        this.cleanupKeyboardNavigation();

        // Animate out (200 ms matches the CSS closing animation).
        if (animate) {
            this.overlayElement.classList.add('closing');
            setTimeout(() => {
                this.removeOverlay();
            }, 200);
        } else {
            this.removeOverlay();
        }

        // Unlock body scroll
        document.body.classList.remove('modal-open');
    }

    /**
     * Remove the overlay from the DOM and reset all static state.
     */
    static removeOverlay() {
        if (this.overlayElement) {
            this.overlayElement.remove();
            this.overlayElement = null;
        }
        this.showcase = null;
        this.metadataPanel = null;
        this.currentModel = null;
        this.currentModelType = null;
    }

    /**
     * @returns {boolean} true while the overlay exists in the DOM.
     */
    static isOpen() {
        return !!this.overlayElement;
    }

    /**
     * Install the global keydown handler:
     * ↑/↓ switch model, ←/→ browse examples, Esc closes,
     * i toggles example params, c copies the prompt.
     * All keys are ignored while the user is typing in an editable field.
     */
    static setupKeyboardNavigation() {
        this.keyboardHandler = (e) => {
            // Ignore if user is typing in an input
            if (this.isUserTyping()) return;

            switch (e.key) {
                case 'ArrowUp':
                    e.preventDefault();
                    this.navigateModel('prev');
                    break;
                case 'ArrowDown':
                    e.preventDefault();
                    this.navigateModel('next');
                    break;
                case 'ArrowLeft':
                    e.preventDefault();
                    this.showcase?.prevImage();
                    break;
                case 'ArrowRight':
                    e.preventDefault();
                    this.showcase?.nextImage();
                    break;
                case 'Escape':
                    e.preventDefault();
                    this.close();
                    break;
                case 'i':
                case 'I':
                    if (!this.isUserTyping()) {
                        e.preventDefault();
                        this.showcase?.toggleParams();
                    }
                    break;
                case 'c':
                case 'C':
                    if (!this.isUserTyping()) {
                        e.preventDefault();
                        this.showcase?.copyPrompt();
                    }
                    break;
            }
        };

        document.addEventListener('keydown', this.keyboardHandler);
    }

    /**
     * Remove the global keydown handler installed by setupKeyboardNavigation().
     */
    static cleanupKeyboardNavigation() {
        if (this.keyboardHandler) {
            document.removeEventListener('keydown', this.keyboardHandler);
            this.keyboardHandler = null;
        }
    }

    /**
     * @returns {boolean} true when focus is in an input, textarea, select,
     *   or contenteditable element.
     */
    static isUserTyping() {
        const activeElement = document.activeElement;
        if (!activeElement) return false;

        const tagName = activeElement.tagName?.toLowerCase();
        const isEditable = activeElement.isContentEditable;
        const isInput = ['input', 'textarea', 'select'].includes(tagName);

        return isEditable || isInput;
    }

    /**
     * Navigate to the previous/next model via the page's virtual scroller.
     * Re-entrancy is blocked by isNavigating; an info toast is shown at
     * either end of the list.
     * @param {'prev'|'next'} direction
     */
    static async navigateModel(direction) {
        if (this.isNavigating || !this.currentModel?.file_path) return;

        const scroller = state.virtualScroller;
        if (!scroller || typeof scroller.getAdjacentItemByFilePath !== 'function') {
            return;
        }

        this.isNavigating = true;

        try {
            const adjacent = await scroller.getAdjacentItemByFilePath(
                this.currentModel.file_path,
                direction
            );

            if (!adjacent?.item) {
                const toastKey = direction === 'prev'
                    ? 'modals.model.navigation.noPrevious'
                    : 'modals.model.navigation.noNext';
                const fallback = direction === 'prev'
                    ? 'No previous model available'
                    : 'No next model available';
                // Lazy import keeps uiHelpers out of the modal's static deps.
                import('../../utils/uiHelpers.js').then(({ showToast }) => {
                    showToast(toastKey, {}, 'info', fallback);
                });
                return;
            }

            await this.transitionToModel(adjacent.item, this.currentModelType);
        } finally {
            this.isNavigating = false;
        }
    }

    /**
     * Reveal the keyboard-shortcut hint (appearance animated via CSS).
     */
    static showKeyboardHint() {
        const hint = this.overlayElement?.querySelector('.model-overlay__hint');
        if (hint) {
            // Animation is handled by CSS, just ensure it's visible
            hint.classList.remove('hidden');
        }
    }

    /**
     * Keep the open overlay's layout in sync with the folder sidebar.
     * @param {boolean} collapsed - New sidebar collapsed state.
     */
    static updateSidebarState(collapsed) {
        if (!this.overlayElement) return;

        if (collapsed) {
            this.overlayElement.classList.add('sidebar-collapsed');
        } else {
            this.overlayElement.classList.remove('sidebar-collapsed');
        }
    }
}
|
||||||
|
|
||||||
|
// Listen for sidebar toggle events: the folder sidebar dispatches
// 'sidebar-toggle' with { detail: { collapsed } } so an open overlay can
// adjust its layout.
document.addEventListener('sidebar-toggle', (e) => {
    ModelModal.updateSidebarState(e.detail.collapsed);
});
|
||||||
321
static/js/components/model-modal/RecipesTab.js
Normal file
321
static/js/components/model-modal/RecipesTab.js
Normal file
@@ -0,0 +1,321 @@
|
|||||||
|
/**
|
||||||
|
* RecipesTab - Recipe cards grid component for LoRA models
|
||||||
|
* Features:
|
||||||
|
* - Recipe cards grid layout
|
||||||
|
* - Copy/View actions
|
||||||
|
* - LoRA availability status badges
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { escapeHtml } from '../shared/utils.js';
|
||||||
|
import { translate } from '../../utils/i18nHelpers.js';
|
||||||
|
import { showToast, copyToClipboard } from '../../utils/uiHelpers.js';
|
||||||
|
import { setSessionItem, removeSessionItem } from '../../utils/storageHelpers.js';
|
||||||
|
|
||||||
|
export class RecipesTab {
|
||||||
|
constructor(container) {
    this.element = container;   // host node the tab renders into
    this.model = null;          // model whose recipes are shown
    this.recipes = [];          // recipes fetched for the model's hash
    this.isLoading = false;     // true while a fetch is in flight
}
|
||||||
|
|
||||||
|
/**
 * Render the recipes tab for a model: show the loading placeholder,
 * then fetch and display the recipes matching the model's hash.
 * @param {{model: Object}} param0 - Model record (sha256 is required by loadRecipes).
 */
async render({ model }) {
    this.model = model;
    this.element.innerHTML = this.getLoadingTemplate();

    await this.loadRecipes();
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get loading template
|
||||||
|
*/
|
||||||
|
getLoadingTemplate() {
|
||||||
|
return `
|
||||||
|
<div class="recipes-loading">
|
||||||
|
<i class="fas fa-spinner fa-spin"></i>
|
||||||
|
<span>${translate('modals.model.loading.recipes', {}, 'Loading recipes...')}</span>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Load recipes from API
|
||||||
|
*/
|
||||||
|
async loadRecipes() {
|
||||||
|
const sha256 = this.model?.sha256;
|
||||||
|
|
||||||
|
if (!sha256) {
|
||||||
|
this.renderError('Missing model hash');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.isLoading = true;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(`/api/lm/recipes/for-lora?hash=${encodeURIComponent(sha256.toLowerCase())}`);
|
||||||
|
const data = await response.json();
|
||||||
|
|
||||||
|
if (!data.success) {
|
||||||
|
throw new Error(data.error || 'Failed to load recipes');
|
||||||
|
}
|
||||||
|
|
||||||
|
this.recipes = data.recipes || [];
|
||||||
|
this.renderRecipes();
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Failed to load recipes:', error);
|
||||||
|
this.renderError(error.message);
|
||||||
|
} finally {
|
||||||
|
this.isLoading = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Render error state
|
||||||
|
*/
|
||||||
|
renderError(message) {
|
||||||
|
this.element.innerHTML = `
|
||||||
|
<div class="recipes-error">
|
||||||
|
<i class="fas fa-exclamation-circle"></i>
|
||||||
|
<p>${escapeHtml(message || 'Failed to load recipes. Please try again later.')}</p>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Render empty state
|
||||||
|
*/
|
||||||
|
renderEmpty() {
|
||||||
|
this.element.innerHTML = `
|
||||||
|
<div class="recipes-empty">
|
||||||
|
<i class="fas fa-book-open"></i>
|
||||||
|
<p>${translate('recipes.noRecipesFound', {}, 'No recipes found that use this LoRA.')}</p>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Render recipes grid
|
||||||
|
*/
|
||||||
|
renderRecipes() {
|
||||||
|
if (!this.recipes || this.recipes.length === 0) {
|
||||||
|
this.renderEmpty();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const loraName = this.model?.model_name || '';
|
||||||
|
|
||||||
|
this.element.innerHTML = `
|
||||||
|
<div class="recipes-header">
|
||||||
|
<div class="recipes-header__text">
|
||||||
|
<span class="recipes-header__eyebrow">Linked recipes</span>
|
||||||
|
<h3>${this.recipes.length} recipe${this.recipes.length > 1 ? 's' : ''} using this LoRA</h3>
|
||||||
|
<p class="recipes-header__description">
|
||||||
|
${loraName ? `Discover workflows crafted for ${escapeHtml(loraName)}.` : 'Discover workflows crafted for this model.'}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<button class="recipes-header__view-all" data-action="view-all">
|
||||||
|
<i class="fas fa-external-link-alt"></i>
|
||||||
|
<span>View all recipes</span>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
<div class="recipes-grid">
|
||||||
|
${this.recipes.map(recipe => this.renderRecipeCard(recipe)).join('')}
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
|
||||||
|
this.bindEvents();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Render a single recipe card
|
||||||
|
*/
|
||||||
|
renderRecipeCard(recipe) {
|
||||||
|
const baseModel = recipe.base_model || '';
|
||||||
|
const loras = recipe.loras || [];
|
||||||
|
const lorasCount = loras.length;
|
||||||
|
const missingLorasCount = loras.filter(lora => !lora.inLibrary && !lora.isDeleted).length;
|
||||||
|
const allLorasAvailable = missingLorasCount === 0 && lorasCount > 0;
|
||||||
|
|
||||||
|
let statusClass = 'empty';
|
||||||
|
let statusLabel = 'No linked LoRAs';
|
||||||
|
let statusTitle = 'No LoRAs in this recipe';
|
||||||
|
|
||||||
|
if (lorasCount > 0) {
|
||||||
|
if (allLorasAvailable) {
|
||||||
|
statusClass = 'ready';
|
||||||
|
statusLabel = `${lorasCount} LoRA${lorasCount > 1 ? 's' : ''} ready`;
|
||||||
|
statusTitle = 'All LoRAs available - Ready to use';
|
||||||
|
} else {
|
||||||
|
statusClass = 'missing';
|
||||||
|
statusLabel = `Missing ${missingLorasCount} of ${lorasCount}`;
|
||||||
|
statusTitle = `${missingLorasCount} of ${lorasCount} LoRAs missing`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const imageUrl = recipe.file_url ||
|
||||||
|
(recipe.file_path ? `/loras_static/root1/preview/${recipe.file_path.split('/').pop()}` :
|
||||||
|
'/loras_static/images/no-preview.png');
|
||||||
|
|
||||||
|
return `
|
||||||
|
<article class="recipe-card"
|
||||||
|
data-recipe-id="${escapeHtml(recipe.id || '')}"
|
||||||
|
data-file-path="${escapeHtml(recipe.file_path || '')}"
|
||||||
|
role="button"
|
||||||
|
tabindex="0"
|
||||||
|
aria-label="${recipe.title ? `View recipe ${escapeHtml(recipe.title)}` : 'View recipe details'}">
|
||||||
|
<div class="recipe-card__media">
|
||||||
|
<img src="${escapeHtml(imageUrl)}"
|
||||||
|
alt="${recipe.title ? escapeHtml(recipe.title) + ' preview' : 'Recipe preview'}"
|
||||||
|
loading="lazy">
|
||||||
|
<div class="recipe-card__media-top">
|
||||||
|
<button class="recipe-card__copy" data-action="copy-recipe" title="Copy recipe syntax">
|
||||||
|
<i class="fas fa-copy"></i>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="recipe-card__body">
|
||||||
|
<h4 class="recipe-card__title" title="${escapeHtml(recipe.title || 'Untitled recipe')}">
|
||||||
|
${escapeHtml(recipe.title || 'Untitled recipe')}
|
||||||
|
</h4>
|
||||||
|
<div class="recipe-card__meta">
|
||||||
|
${baseModel ? `<span class="recipe-card__badge recipe-card__badge--base">${escapeHtml(baseModel)}</span>` : ''}
|
||||||
|
<span class="recipe-card__badge recipe-card__badge--${statusClass}" title="${escapeHtml(statusTitle)}">
|
||||||
|
<i class="fas fa-layer-group"></i>
|
||||||
|
<span>${escapeHtml(statusLabel)}</span>
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
<div class="recipe-card__cta">
|
||||||
|
<span>View details</span>
|
||||||
|
<i class="fas fa-arrow-right"></i>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</article>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Bind event listeners
|
||||||
|
*/
|
||||||
|
bindEvents() {
|
||||||
|
this.element.addEventListener('click', async (e) => {
|
||||||
|
const target = e.target.closest('[data-action]');
|
||||||
|
|
||||||
|
if (target) {
|
||||||
|
const action = target.dataset.action;
|
||||||
|
|
||||||
|
if (action === 'view-all') {
|
||||||
|
await this.navigateToRecipesPage();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (action === 'copy-recipe') {
|
||||||
|
const card = target.closest('.recipe-card');
|
||||||
|
const recipeId = card?.dataset.recipeId;
|
||||||
|
if (recipeId) {
|
||||||
|
e.stopPropagation();
|
||||||
|
this.copyRecipeSyntax(recipeId);
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Card click - navigate to recipe
|
||||||
|
const card = e.target.closest('.recipe-card');
|
||||||
|
if (card && !e.target.closest('[data-action]')) {
|
||||||
|
const recipeId = card.dataset.recipeId;
|
||||||
|
if (recipeId) {
|
||||||
|
await this.navigateToRecipeDetails(recipeId);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Keyboard navigation for cards
|
||||||
|
this.element.addEventListener('keydown', async (e) => {
|
||||||
|
if (e.key === 'Enter' || e.key === ' ') {
|
||||||
|
const card = e.target.closest('.recipe-card');
|
||||||
|
if (card) {
|
||||||
|
e.preventDefault();
|
||||||
|
const recipeId = card.dataset.recipeId;
|
||||||
|
if (recipeId) {
|
||||||
|
await this.navigateToRecipeDetails(recipeId);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Copy recipe syntax to clipboard
|
||||||
|
*/
|
||||||
|
async copyRecipeSyntax(recipeId) {
|
||||||
|
if (!recipeId) {
|
||||||
|
showToast('toast.recipes.noRecipeId', {}, 'error');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(`/api/lm/recipe/${recipeId}/syntax`);
|
||||||
|
const data = await response.json();
|
||||||
|
|
||||||
|
if (data.success && data.syntax) {
|
||||||
|
await copyToClipboard(data.syntax, 'Recipe syntax copied to clipboard');
|
||||||
|
} else {
|
||||||
|
throw new Error(data.error || 'No syntax returned');
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to copy recipe syntax:', err);
|
||||||
|
showToast('toast.recipes.copyFailed', { message: err.message }, 'error');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Navigate to recipes page with filter
|
||||||
|
*/
|
||||||
|
async navigateToRecipesPage() {
|
||||||
|
// Close the modal
|
||||||
|
const { ModelModal } = await import('./ModelModal.js');
|
||||||
|
ModelModal.close();
|
||||||
|
|
||||||
|
// Clear any previous filters
|
||||||
|
removeSessionItem('filterLoraName');
|
||||||
|
removeSessionItem('filterLoraHash');
|
||||||
|
removeSessionItem('viewRecipeId');
|
||||||
|
|
||||||
|
// Store the LoRA name and hash filter in sessionStorage
|
||||||
|
setSessionItem('lora_to_recipe_filterLoraName', this.model?.model_name || '');
|
||||||
|
setSessionItem('lora_to_recipe_filterLoraHash', this.model?.sha256 || '');
|
||||||
|
|
||||||
|
// Navigate to recipes page
|
||||||
|
window.location.href = '/loras/recipes';
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Navigate to specific recipe details
|
||||||
|
*/
|
||||||
|
async navigateToRecipeDetails(recipeId) {
|
||||||
|
// Close the modal
|
||||||
|
const { ModelModal } = await import('./ModelModal.js');
|
||||||
|
ModelModal.close();
|
||||||
|
|
||||||
|
// Clear any previous filters
|
||||||
|
removeSessionItem('filterLoraName');
|
||||||
|
removeSessionItem('filterLoraHash');
|
||||||
|
removeSessionItem('viewRecipeId');
|
||||||
|
|
||||||
|
// Store the recipe ID in sessionStorage to load on recipes page
|
||||||
|
setSessionItem('viewRecipeId', recipeId);
|
||||||
|
|
||||||
|
// Navigate to recipes page
|
||||||
|
window.location.href = '/loras/recipes';
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Refresh recipes
|
||||||
|
*/
|
||||||
|
async refresh() {
|
||||||
|
await this.loadRecipes();
|
||||||
|
}
|
||||||
|
}
|
||||||
1501
static/js/components/model-modal/Showcase.js
Normal file
1501
static/js/components/model-modal/Showcase.js
Normal file
File diff suppressed because it is too large
Load Diff
627
static/js/components/model-modal/VersionsTab.js
Normal file
627
static/js/components/model-modal/VersionsTab.js
Normal file
@@ -0,0 +1,627 @@
|
|||||||
|
/**
|
||||||
|
* VersionsTab - Model versions list component
|
||||||
|
* Features:
|
||||||
|
* - Version cards with preview, badges, and actions
|
||||||
|
* - Download/Delete/Ignore actions
|
||||||
|
* - Base model filter toggle
|
||||||
|
* - Reference: static/js/components/shared/ModelVersionsTab.js
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { escapeHtml, formatFileSize } from '../shared/utils.js';
|
||||||
|
import { translate } from '../../utils/i18nHelpers.js';
|
||||||
|
import { showToast } from '../../utils/uiHelpers.js';
|
||||||
|
import { getModelApiClient } from '../../api/modelApiFactory.js';
|
||||||
|
import { downloadManager } from '../../managers/DownloadManager.js';
|
||||||
|
import { modalManager } from '../../managers/ModalManager.js';
|
||||||
|
|
||||||
|
const VIDEO_EXTENSIONS = ['.mp4', '.webm', '.mov', '.mkv'];
|
||||||
|
const PREVIEW_PLACEHOLDER_URL = '/loras_static/images/no-preview.png';
|
||||||
|
|
||||||
|
const DISPLAY_FILTER_MODES = Object.freeze({
|
||||||
|
SAME_BASE: 'same_base',
|
||||||
|
ANY: 'any',
|
||||||
|
});
|
||||||
|
|
||||||
|
export class VersionsTab {
|
||||||
|
    constructor(container) {
        // Persistent container element the tab renders into.
        this.element = container;
        // Current model object (set in render()).
        this.model = null;
        // Model type string used to pick the API client (set in render()).
        this.modelType = null;
        // NOTE(review): never populated in the visible code — version data
        // lives on this.record; presumably kept for interface compatibility.
        this.versions = [];
        // True while a fetch is in flight.
        this.isLoading = false;
        // Base-model display filter; defaults to showing every version.
        this.displayMode = DISPLAY_FILTER_MODES.ANY;
        // Update-versions record returned by the API (versions, ignore flags).
        this.record = null;
    }
|
||||||
|
|
||||||
|
    /**
     * Render the versions tab: show a loading placeholder, then fetch
     * and render the version list for the given model.
     * @param {object} options - { model, modelType }
     */
    async render({ model, modelType }) {
        this.model = model;
        this.modelType = modelType;
        this.element.innerHTML = this.getLoadingTemplate();

        await this.loadVersions();
    }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get loading template
|
||||||
|
*/
|
||||||
|
getLoadingTemplate() {
|
||||||
|
return `
|
||||||
|
<div class="versions-loading">
|
||||||
|
<i class="fas fa-spinner fa-spin"></i>
|
||||||
|
<span>${translate('modals.model.loading.versions', {}, 'Loading versions...')}</span>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
    /**
     * Load version history from the API.
     * Requires the model to carry a Civitai model id; renders an error
     * state otherwise. On success, stores the returned record and
     * re-renders the version list.
     */
    async loadVersions() {
        const modelId = this.model?.civitai?.modelId;

        if (!modelId) {
            this.renderError(translate('modals.model.versions.missingModelId', {}, 'This model is missing a Civitai model id.'));
            return;
        }

        this.isLoading = true;

        try {
            const client = getModelApiClient(this.modelType);
            // refresh: false — allow the backend to serve cached version data.
            const response = await client.fetchModelUpdateVersions(modelId, { refresh: false });

            if (!response?.success) {
                throw new Error(response?.error || 'Failed to load versions');
            }

            this.record = response.record;
            this.renderVersions();
        } catch (error) {
            console.error('Failed to load versions:', error);
            this.renderError(error.message);
        } finally {
            this.isLoading = false;
        }
    }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Render error state
|
||||||
|
*/
|
||||||
|
renderError(message) {
|
||||||
|
this.element.innerHTML = `
|
||||||
|
<div class="versions-error">
|
||||||
|
<i class="fas fa-exclamation-triangle"></i>
|
||||||
|
<p>${escapeHtml(message || translate('modals.model.versions.error', {}, 'Failed to load versions.'))}</p>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Render empty state
|
||||||
|
*/
|
||||||
|
renderEmpty() {
|
||||||
|
this.element.innerHTML = `
|
||||||
|
<div class="versions-empty">
|
||||||
|
<i class="fas fa-info-circle"></i>
|
||||||
|
<p>${translate('modals.model.versions.empty', {}, 'No version history available for this model yet.')}</p>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Render versions list
|
||||||
|
*/
|
||||||
|
renderVersions() {
|
||||||
|
if (!this.record || !Array.isArray(this.record.versions) || this.record.versions.length === 0) {
|
||||||
|
this.renderEmpty();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const currentVersionId = this.model?.civitai?.versionId;
|
||||||
|
const sortedVersions = [...this.record.versions].sort((a, b) => Number(b.versionId) - Number(a.versionId));
|
||||||
|
|
||||||
|
// Filter versions based on display mode
|
||||||
|
const filteredVersions = this.filterVersions(sortedVersions, currentVersionId);
|
||||||
|
|
||||||
|
if (filteredVersions.length === 0) {
|
||||||
|
this.renderFilteredEmpty();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.element.innerHTML = `
|
||||||
|
${this.renderToolbar()}
|
||||||
|
<div class="versions-list">
|
||||||
|
${filteredVersions.map(version => this.renderVersionCard(version, currentVersionId)).join('')}
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
|
||||||
|
this.bindEvents();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Filter versions based on display mode
|
||||||
|
*/
|
||||||
|
filterVersions(versions, currentVersionId) {
|
||||||
|
const currentVersion = versions.find(v => v.versionId === currentVersionId);
|
||||||
|
const currentBaseModel = currentVersion?.baseModel;
|
||||||
|
|
||||||
|
if (this.displayMode !== DISPLAY_FILTER_MODES.SAME_BASE || !currentBaseModel) {
|
||||||
|
return versions;
|
||||||
|
}
|
||||||
|
|
||||||
|
return versions.filter(version => {
|
||||||
|
const versionBase = version.baseModel?.toLowerCase().trim();
|
||||||
|
const targetBase = currentBaseModel.toLowerCase().trim();
|
||||||
|
return versionBase === targetBase;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Render filtered empty state
|
||||||
|
*/
|
||||||
|
renderFilteredEmpty() {
|
||||||
|
const currentVersion = this.record.versions.find(v => v.versionId === this.model?.civitai?.versionId);
|
||||||
|
const baseModelLabel = currentVersion?.baseModel || translate('modals.model.metadata.unknown', {}, 'Unknown');
|
||||||
|
|
||||||
|
this.element.innerHTML = `
|
||||||
|
${this.renderToolbar()}
|
||||||
|
<div class="versions-empty versions-empty-filter">
|
||||||
|
<i class="fas fa-info-circle"></i>
|
||||||
|
<p>${translate('modals.model.versions.filters.empty', { baseModel: baseModelLabel }, 'No versions match the current base model filter.')}</p>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
|
||||||
|
this.bindEvents();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Render toolbar with actions
|
||||||
|
*/
|
||||||
|
renderToolbar() {
|
||||||
|
const ignoreText = this.record.shouldIgnore
|
||||||
|
? translate('modals.model.versions.actions.resumeModelUpdates', {}, 'Resume updates for this model')
|
||||||
|
: translate('modals.model.versions.actions.ignoreModelUpdates', {}, 'Ignore updates for this model');
|
||||||
|
|
||||||
|
const isFilteringActive = this.displayMode === DISPLAY_FILTER_MODES.SAME_BASE;
|
||||||
|
const toggleTooltip = isFilteringActive
|
||||||
|
? translate('modals.model.versions.filters.tooltip.showAllVersions', {}, 'Switch to showing all versions')
|
||||||
|
: translate('modals.model.versions.filters.tooltip.showSameBaseVersions', {}, 'Switch to showing only versions with the current base model');
|
||||||
|
|
||||||
|
return `
|
||||||
|
<header class="versions-toolbar">
|
||||||
|
<div class="versions-toolbar-info">
|
||||||
|
<div class="versions-toolbar-info-heading">
|
||||||
|
<h3>${translate('modals.model.versions.heading', {}, 'Model versions')}</h3>
|
||||||
|
<button class="versions-filter-toggle ${isFilteringActive ? 'active' : ''}"
|
||||||
|
data-action="toggle-filter"
|
||||||
|
title="${escapeHtml(toggleTooltip)}"
|
||||||
|
type="button">
|
||||||
|
<i class="fas fa-th-list"></i>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
<p>${translate('modals.model.versions.copy', { count: this.record.versions.length }, 'Track and manage every version of this model in one place.')}</p>
|
||||||
|
</div>
|
||||||
|
<div class="versions-toolbar-actions">
|
||||||
|
<button class="versions-toolbar-btn versions-toolbar-btn-primary" data-action="toggle-model-ignore">
|
||||||
|
${escapeHtml(ignoreText)}
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</header>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Render a single version card
|
||||||
|
*/
|
||||||
|
renderVersionCard(version, currentVersionId) {
|
||||||
|
const isCurrent = version.versionId === currentVersionId;
|
||||||
|
const isInLibrary = version.isInLibrary;
|
||||||
|
const isNewer = this.isNewerVersion(version);
|
||||||
|
const badges = this.buildBadges(version, isCurrent, isNewer);
|
||||||
|
const actions = this.buildActions(version);
|
||||||
|
|
||||||
|
const metaParts = [];
|
||||||
|
if (version.baseModel) metaParts.push(`<span class="version-meta-primary">${escapeHtml(version.baseModel)}</span>`);
|
||||||
|
if (version.releasedAt) {
|
||||||
|
const date = new Date(version.releasedAt);
|
||||||
|
if (!isNaN(date.getTime())) {
|
||||||
|
metaParts.push(escapeHtml(date.toLocaleDateString(undefined, { year: 'numeric', month: 'short', day: 'numeric' })));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (version.sizeBytes > 0) metaParts.push(escapeHtml(formatFileSize(version.sizeBytes)));
|
||||||
|
|
||||||
|
const metaMarkup = metaParts.length > 0
|
||||||
|
? metaParts.map(m => `<span class="version-meta-item">${m}</span>`).join('<span class="version-meta-separator">•</span>')
|
||||||
|
: escapeHtml(translate('modals.model.versions.labels.noDetails', {}, 'No additional details'));
|
||||||
|
|
||||||
|
const civitaiUrl = this.buildCivitaiUrl(version.modelId, version.versionId);
|
||||||
|
const clickAction = civitaiUrl ? `data-civitai-url="${escapeHtml(civitaiUrl)}"` : '';
|
||||||
|
|
||||||
|
return `
|
||||||
|
<div class="version-card ${isCurrent ? 'is-current' : ''} ${civitaiUrl ? 'is-clickable' : ''}"
|
||||||
|
data-version-id="${version.versionId}"
|
||||||
|
${clickAction}>
|
||||||
|
${this.renderMedia(version)}
|
||||||
|
<div class="version-details">
|
||||||
|
<div class="version-title">
|
||||||
|
<span class="version-name">${escapeHtml(version.name || translate('modals.model.versions.labels.unnamed', {}, 'Untitled Version'))}</span>
|
||||||
|
</div>
|
||||||
|
<div class="version-badges">${badges}</div>
|
||||||
|
<div class="version-meta">${metaMarkup}</div>
|
||||||
|
</div>
|
||||||
|
<div class="version-actions">
|
||||||
|
${actions}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if version is newer than any in library
|
||||||
|
*/
|
||||||
|
isNewerVersion(version) {
|
||||||
|
if (!this.record?.inLibraryVersionIds?.length) return false;
|
||||||
|
if (version.isInLibrary) return false;
|
||||||
|
const maxInLibrary = Math.max(...this.record.inLibraryVersionIds);
|
||||||
|
return version.versionId > maxInLibrary;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build badges HTML
|
||||||
|
*/
|
||||||
|
buildBadges(version, isCurrent, isNewer) {
|
||||||
|
const badges = [];
|
||||||
|
|
||||||
|
if (isCurrent) {
|
||||||
|
badges.push(this.createBadge(
|
||||||
|
translate('modals.model.versions.badges.current', {}, 'Current Version'),
|
||||||
|
'current'
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
if (version.isInLibrary) {
|
||||||
|
badges.push(this.createBadge(
|
||||||
|
translate('modals.model.versions.badges.inLibrary', {}, 'In Library'),
|
||||||
|
'success'
|
||||||
|
));
|
||||||
|
} else if (isNewer && !version.shouldIgnore) {
|
||||||
|
badges.push(this.createBadge(
|
||||||
|
translate('modals.model.versions.badges.newer', {}, 'Newer Version'),
|
||||||
|
'info'
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
if (version.shouldIgnore) {
|
||||||
|
badges.push(this.createBadge(
|
||||||
|
translate('modals.model.versions.badges.ignored', {}, 'Ignored'),
|
||||||
|
'muted'
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
return badges.join('');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a badge element
|
||||||
|
*/
|
||||||
|
createBadge(label, tone) {
|
||||||
|
return `<span class="version-badge version-badge-${tone}">${escapeHtml(label)}</span>`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build actions HTML
|
||||||
|
*/
|
||||||
|
buildActions(version) {
|
||||||
|
const actions = [];
|
||||||
|
|
||||||
|
if (!version.isInLibrary) {
|
||||||
|
actions.push(`
|
||||||
|
<button class="version-action version-action-primary" data-action="download">
|
||||||
|
${escapeHtml(translate('modals.model.versions.actions.download', {}, 'Download'))}
|
||||||
|
</button>
|
||||||
|
`);
|
||||||
|
} else if (version.filePath) {
|
||||||
|
actions.push(`
|
||||||
|
<button class="version-action version-action-danger" data-action="delete">
|
||||||
|
${escapeHtml(translate('modals.model.versions.actions.delete', {}, 'Delete'))}
|
||||||
|
</button>
|
||||||
|
`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const ignoreLabel = version.shouldIgnore
|
||||||
|
? translate('modals.model.versions.actions.unignore', {}, 'Unignore')
|
||||||
|
: translate('modals.model.versions.actions.ignore', {}, 'Ignore');
|
||||||
|
|
||||||
|
actions.push(`
|
||||||
|
<button class="version-action version-action-ghost" data-action="toggle-ignore">
|
||||||
|
${escapeHtml(ignoreLabel)}
|
||||||
|
</button>
|
||||||
|
`);
|
||||||
|
|
||||||
|
return actions.join('');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Render media (image/video)
|
||||||
|
*/
|
||||||
|
renderMedia(version) {
|
||||||
|
if (!version.previewUrl) {
|
||||||
|
return `
|
||||||
|
<div class="version-media version-media-placeholder">
|
||||||
|
${escapeHtml(translate('modals.model.versions.media.placeholder', {}, 'No preview'))}
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.isVideoUrl(version.previewUrl)) {
|
||||||
|
return `
|
||||||
|
<div class="version-media">
|
||||||
|
<video src="${escapeHtml(version.previewUrl)}"
|
||||||
|
controls muted loop playsinline preload="metadata">
|
||||||
|
</video>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
return `
|
||||||
|
<div class="version-media">
|
||||||
|
<img src="${escapeHtml(version.previewUrl)}"
|
||||||
|
alt="${escapeHtml(version.name || 'preview')}"
|
||||||
|
loading="lazy">
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if URL is a video
|
||||||
|
*/
|
||||||
|
isVideoUrl(url) {
|
||||||
|
if (!url) return false;
|
||||||
|
const extension = url.split('.').pop()?.toLowerCase()?.split('?')[0];
|
||||||
|
return VIDEO_EXTENSIONS.includes(`.${extension}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build Civitai URL
|
||||||
|
*/
|
||||||
|
buildCivitaiUrl(modelId, versionId) {
|
||||||
|
if (!modelId || !versionId) return null;
|
||||||
|
return `https://civitai.com/models/${encodeURIComponent(modelId)}?modelVersionId=${encodeURIComponent(versionId)}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Bind event listeners
|
||||||
|
*/
|
||||||
|
bindEvents() {
|
||||||
|
this.element.addEventListener('click', (e) => {
|
||||||
|
const target = e.target.closest('[data-action]');
|
||||||
|
if (!target) {
|
||||||
|
// Check if clicked on a clickable card
|
||||||
|
const card = e.target.closest('.version-card.is-clickable');
|
||||||
|
if (card && !e.target.closest('.version-actions')) {
|
||||||
|
const url = card.dataset.civitaiUrl;
|
||||||
|
if (url) window.open(url, '_blank', 'noopener,noreferrer');
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const action = target.dataset.action;
|
||||||
|
const card = target.closest('.version-card');
|
||||||
|
const versionId = card ? parseInt(card.dataset.versionId, 10) : null;
|
||||||
|
|
||||||
|
switch (action) {
|
||||||
|
case 'toggle-filter':
|
||||||
|
this.toggleFilterMode();
|
||||||
|
break;
|
||||||
|
case 'toggle-model-ignore':
|
||||||
|
this.handleToggleModelIgnore();
|
||||||
|
break;
|
||||||
|
case 'download':
|
||||||
|
if (versionId) this.handleDownload(versionId, target);
|
||||||
|
break;
|
||||||
|
case 'delete':
|
||||||
|
if (versionId) this.handleDelete(versionId, target);
|
||||||
|
break;
|
||||||
|
case 'toggle-ignore':
|
||||||
|
if (versionId) this.handleToggleVersionIgnore(versionId, target);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Toggle filter mode
|
||||||
|
*/
|
||||||
|
toggleFilterMode() {
|
||||||
|
this.displayMode = this.displayMode === DISPLAY_FILTER_MODES.SAME_BASE
|
||||||
|
? DISPLAY_FILTER_MODES.ANY
|
||||||
|
: DISPLAY_FILTER_MODES.SAME_BASE;
|
||||||
|
this.renderVersions();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle toggle model ignore
|
||||||
|
*/
|
||||||
|
async handleToggleModelIgnore() {
|
||||||
|
if (!this.record) return;
|
||||||
|
|
||||||
|
const modelId = this.record.modelId;
|
||||||
|
const nextValue = !this.record.shouldIgnore;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const client = getModelApiClient(this.modelType);
|
||||||
|
const response = await client.setModelUpdateIgnore(modelId, nextValue);
|
||||||
|
|
||||||
|
if (!response?.success) {
|
||||||
|
throw new Error(response?.error || 'Request failed');
|
||||||
|
}
|
||||||
|
|
||||||
|
this.record = response.record;
|
||||||
|
this.renderVersions();
|
||||||
|
|
||||||
|
const toastKey = nextValue
|
||||||
|
? 'modals.model.versions.toast.modelIgnored'
|
||||||
|
: 'modals.model.versions.toast.modelResumed';
|
||||||
|
showToast(toastKey, {}, 'success');
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Failed to toggle model ignore:', error);
|
||||||
|
showToast(error?.message || 'Failed to update ignore preference', {}, 'error');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle download version
|
||||||
|
*/
|
||||||
|
async handleDownload(versionId, button) {
|
||||||
|
const version = this.record.versions.find(v => v.versionId === versionId);
|
||||||
|
if (!version) return;
|
||||||
|
|
||||||
|
button.disabled = true;
|
||||||
|
|
||||||
|
try {
|
||||||
|
await downloadManager.downloadVersionWithDefaults(
|
||||||
|
this.modelType,
|
||||||
|
this.record.modelId,
|
||||||
|
versionId,
|
||||||
|
{ versionName: version.name || `#${versionId}` }
|
||||||
|
);
|
||||||
|
|
||||||
|
// Reload versions after download starts
|
||||||
|
setTimeout(() => this.loadVersions(), 1000);
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Failed to download version:', error);
|
||||||
|
} finally {
|
||||||
|
button.disabled = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle delete version
|
||||||
|
*/
|
||||||
|
async handleDelete(versionId, button) {
|
||||||
|
const version = this.record.versions.find(v => v.versionId === versionId);
|
||||||
|
if (!version?.filePath) return;
|
||||||
|
|
||||||
|
const confirmed = await this.showDeleteConfirmation(version);
|
||||||
|
if (!confirmed) return;
|
||||||
|
|
||||||
|
button.disabled = true;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const client = getModelApiClient(this.modelType);
|
||||||
|
await client.deleteModel(version.filePath);
|
||||||
|
|
||||||
|
showToast('modals.model.versions.toast.versionDeleted', {}, 'success');
|
||||||
|
await this.loadVersions();
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Failed to delete version:', error);
|
||||||
|
showToast(error?.message || 'Failed to delete version', {}, 'error');
|
||||||
|
button.disabled = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Show delete confirmation modal
|
||||||
|
*/
|
||||||
|
async showDeleteConfirmation(version) {
|
||||||
|
return new Promise((resolve) => {
|
||||||
|
const modalRecord = modalManager?.getModal?.('deleteModal');
|
||||||
|
if (!modalRecord?.element) {
|
||||||
|
// Fallback to browser confirm
|
||||||
|
const message = translate('modals.model.versions.confirm.delete', {}, 'Delete this version from your library?');
|
||||||
|
resolve(window.confirm(message));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const title = translate('modals.model.versions.actions.delete', {}, 'Delete');
|
||||||
|
const message = translate('modals.model.versions.confirm.delete', {}, 'Delete this version from your library?');
|
||||||
|
const versionName = version.name || translate('modals.model.versions.labels.unnamed', {}, 'Untitled Version');
|
||||||
|
|
||||||
|
const content = `
|
||||||
|
<div class="modal-content delete-modal-content version-delete-modal">
|
||||||
|
<h2>${escapeHtml(title)}</h2>
|
||||||
|
<p class="delete-message">${escapeHtml(message)}</p>
|
||||||
|
<div class="delete-model-info">
|
||||||
|
<div class="delete-preview">
|
||||||
|
${version.previewUrl ? `
|
||||||
|
<img src="${escapeHtml(version.previewUrl)}" alt="${escapeHtml(versionName)}"
|
||||||
|
onerror="this.src='${PREVIEW_PLACEHOLDER_URL}'">
|
||||||
|
` : `<img src="${PREVIEW_PLACEHOLDER_URL}" alt="${escapeHtml(versionName)}">`}
|
||||||
|
</div>
|
||||||
|
<div class="delete-info">
|
||||||
|
<h3>${escapeHtml(versionName)}</h3>
|
||||||
|
${version.baseModel ? `<p class="version-base-model">${escapeHtml(version.baseModel)}</p>` : ''}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="modal-actions">
|
||||||
|
<button class="cancel-btn" data-action="cancel">${escapeHtml(translate('common.actions.cancel', {}, 'Cancel'))}</button>
|
||||||
|
<button class="delete-btn" data-action="confirm">${escapeHtml(translate('common.actions.delete', {}, 'Delete'))}</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
|
||||||
|
modalManager.showModal('deleteModal', content);
|
||||||
|
|
||||||
|
const modalElement = modalRecord.element;
|
||||||
|
const handleAction = (e) => {
|
||||||
|
const action = e.target.closest('[data-action]')?.dataset.action;
|
||||||
|
if (action === 'confirm') {
|
||||||
|
modalManager.closeModal('deleteModal');
|
||||||
|
resolve(true);
|
||||||
|
} else if (action === 'cancel') {
|
||||||
|
modalManager.closeModal('deleteModal');
|
||||||
|
resolve(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
modalElement.addEventListener('click', handleAction, { once: true });
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle toggle version ignore
|
||||||
|
*/
|
||||||
|
async handleToggleVersionIgnore(versionId, button) {
|
||||||
|
const version = this.record.versions.find(v => v.versionId === versionId);
|
||||||
|
if (!version) return;
|
||||||
|
|
||||||
|
const nextValue = !version.shouldIgnore;
|
||||||
|
button.disabled = true;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const client = getModelApiClient(this.modelType);
|
||||||
|
const response = await client.setVersionUpdateIgnore(
|
||||||
|
this.record.modelId,
|
||||||
|
versionId,
|
||||||
|
nextValue
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!response?.success) {
|
||||||
|
throw new Error(response?.error || 'Request failed');
|
||||||
|
}
|
||||||
|
|
||||||
|
this.record = response.record;
|
||||||
|
this.renderVersions();
|
||||||
|
|
||||||
|
const updatedVersion = response.record.versions.find(v => v.versionId === versionId);
|
||||||
|
const toastKey = updatedVersion?.shouldIgnore
|
||||||
|
? 'modals.model.versions.toast.versionIgnored'
|
||||||
|
: 'modals.model.versions.toast.versionUnignored';
|
||||||
|
showToast(toastKey, {}, 'success');
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Failed to toggle version ignore:', error);
|
||||||
|
showToast(error?.message || 'Failed to update version preference', {}, 'error');
|
||||||
|
button.disabled = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Refresh versions
|
||||||
|
*/
|
||||||
|
async refresh() {
|
||||||
|
await this.loadVersions();
|
||||||
|
}
|
||||||
|
}
|
||||||
16
static/js/components/model-modal/index.js
Normal file
16
static/js/components/model-modal/index.js
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
/**
|
||||||
|
* Model Modal - New Split-View Overlay Design
|
||||||
|
* Phase 1 Implementation
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { ModelModal } from './ModelModal.js';
|
||||||
|
|
||||||
|
// Export the public API
|
||||||
|
export const modelModal = {
|
||||||
|
show: ModelModal.show.bind(ModelModal),
|
||||||
|
close: ModelModal.close.bind(ModelModal),
|
||||||
|
isOpen: ModelModal.isOpen.bind(ModelModal),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Default export for convenience
|
||||||
|
export default modelModal;
|
||||||
@@ -22,6 +22,12 @@ import { loadRecipesForLora } from './RecipeTab.js';
|
|||||||
import { translate } from '../../utils/i18nHelpers.js';
|
import { translate } from '../../utils/i18nHelpers.js';
|
||||||
import { state } from '../../state/index.js';
|
import { state } from '../../state/index.js';
|
||||||
|
|
||||||
|
// Import new ModelModal for split-view overlay (Phase 1)
|
||||||
|
import { modelModal as newModelModal } from '../model-modal/index.js';
|
||||||
|
|
||||||
|
// Feature flag: Use new split-view design
|
||||||
|
const USE_NEW_MODAL = true;
|
||||||
|
|
||||||
function getModalFilePath(fallback = '') {
|
function getModalFilePath(fallback = '') {
|
||||||
const modalElement = document.getElementById('modelModal');
|
const modalElement = document.getElementById('modelModal');
|
||||||
if (modalElement && modalElement.dataset && modalElement.dataset.filePath) {
|
if (modalElement && modalElement.dataset && modalElement.dataset.filePath) {
|
||||||
@@ -238,6 +244,12 @@ function renderLicenseIcons(modelData) {
|
|||||||
* @param {string} modelType - Type of model ('lora' or 'checkpoint')
|
* @param {string} modelType - Type of model ('lora' or 'checkpoint')
|
||||||
*/
|
*/
|
||||||
export async function showModelModal(model, modelType) {
|
export async function showModelModal(model, modelType) {
|
||||||
|
// Use new split-view overlay design when feature flag is enabled
|
||||||
|
if (USE_NEW_MODAL) {
|
||||||
|
return newModelModal.show(model, modelType);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Legacy implementation below (deprecated, kept for fallback)
|
||||||
const modalId = 'modelModal';
|
const modalId = 'modelModal';
|
||||||
const modalTitle = model.model_name;
|
const modalTitle = model.model_name;
|
||||||
cleanupNavigationShortcuts();
|
cleanupNavigationShortcuts();
|
||||||
@@ -1020,11 +1032,5 @@ async function openFileLocation(filePath) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Export the model modal API
|
// Re-export for compatibility
|
||||||
const modelModal = {
|
export { toggleShowcase, scrollToTop };
|
||||||
show: showModelModal,
|
|
||||||
toggleShowcase,
|
|
||||||
scrollToTop
|
|
||||||
};
|
|
||||||
|
|
||||||
export { modelModal };
|
|
||||||
|
|||||||
@@ -4,9 +4,11 @@ import {
|
|||||||
removeStorageItem
|
removeStorageItem
|
||||||
} from '../utils/storageHelpers.js';
|
} from '../utils/storageHelpers.js';
|
||||||
import { translate } from '../utils/i18nHelpers.js';
|
import { translate } from '../utils/i18nHelpers.js';
|
||||||
import { state } from '../state/index.js'
|
import { state } from '../state/index.js';
|
||||||
|
import { getModelApiClient } from '../api/modelApiFactory.js';
|
||||||
|
|
||||||
const COMMUNITY_SUPPORT_BANNER_ID = 'community-support';
|
const COMMUNITY_SUPPORT_BANNER_ID = 'community-support';
|
||||||
|
const CACHE_HEALTH_BANNER_ID = 'cache-health-warning';
|
||||||
const COMMUNITY_SUPPORT_BANNER_DELAY_MS = 5 * 24 * 60 * 60 * 1000; // 5 days
|
const COMMUNITY_SUPPORT_BANNER_DELAY_MS = 5 * 24 * 60 * 60 * 1000; // 5 days
|
||||||
const COMMUNITY_SUPPORT_FIRST_SEEN_AT_KEY = 'community_support_banner_first_seen_at';
|
const COMMUNITY_SUPPORT_FIRST_SEEN_AT_KEY = 'community_support_banner_first_seen_at';
|
||||||
const COMMUNITY_SUPPORT_VERSION_KEY = 'community_support_banner_state_version';
|
const COMMUNITY_SUPPORT_VERSION_KEY = 'community_support_banner_state_version';
|
||||||
@@ -293,6 +295,177 @@ class BannerService {
|
|||||||
location.reload();
|
location.reload();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Register a cache health warning banner
|
||||||
|
* @param {Object} healthData - Health data from WebSocket
|
||||||
|
*/
|
||||||
|
registerCacheHealthBanner(healthData) {
|
||||||
|
if (!healthData || healthData.status === 'healthy') {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remove existing cache health banner if any
|
||||||
|
this.removeBannerElement(CACHE_HEALTH_BANNER_ID);
|
||||||
|
|
||||||
|
const isCorrupted = healthData.status === 'corrupted';
|
||||||
|
const titleKey = isCorrupted
|
||||||
|
? 'banners.cacheHealth.corrupted.title'
|
||||||
|
: 'banners.cacheHealth.degraded.title';
|
||||||
|
const defaultTitle = isCorrupted
|
||||||
|
? 'Cache Corruption Detected'
|
||||||
|
: 'Cache Issues Detected';
|
||||||
|
|
||||||
|
const title = translate(titleKey, {}, defaultTitle);
|
||||||
|
|
||||||
|
const contentKey = 'banners.cacheHealth.content';
|
||||||
|
const defaultContent = 'Found {invalid} of {total} cache entries are invalid ({rate}). This may cause missing models or errors. Rebuilding the cache is recommended.';
|
||||||
|
const content = translate(contentKey, {
|
||||||
|
invalid: healthData.details?.invalid || 0,
|
||||||
|
total: healthData.details?.total || 0,
|
||||||
|
rate: healthData.details?.corruption_rate || '0%'
|
||||||
|
}, defaultContent);
|
||||||
|
|
||||||
|
this.registerBanner(CACHE_HEALTH_BANNER_ID, {
|
||||||
|
id: CACHE_HEALTH_BANNER_ID,
|
||||||
|
title: title,
|
||||||
|
content: content,
|
||||||
|
pageType: healthData.pageType,
|
||||||
|
actions: [
|
||||||
|
{
|
||||||
|
text: translate('banners.cacheHealth.rebuildCache', {}, 'Rebuild Cache'),
|
||||||
|
icon: 'fas fa-sync-alt',
|
||||||
|
action: 'rebuild-cache',
|
||||||
|
type: 'primary'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
text: translate('banners.cacheHealth.dismiss', {}, 'Dismiss'),
|
||||||
|
icon: 'fas fa-times',
|
||||||
|
action: 'dismiss',
|
||||||
|
type: 'secondary'
|
||||||
|
}
|
||||||
|
],
|
||||||
|
dismissible: true,
|
||||||
|
priority: 10, // High priority
|
||||||
|
onRegister: (bannerElement) => {
|
||||||
|
// Attach click handlers for actions
|
||||||
|
const rebuildBtn = bannerElement.querySelector('[data-action="rebuild-cache"]');
|
||||||
|
const dismissBtn = bannerElement.querySelector('[data-action="dismiss"]');
|
||||||
|
|
||||||
|
if (rebuildBtn) {
|
||||||
|
rebuildBtn.addEventListener('click', (e) => {
|
||||||
|
e.preventDefault();
|
||||||
|
this.handleRebuildCache(bannerElement, healthData.pageType);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (dismissBtn) {
|
||||||
|
dismissBtn.addEventListener('click', (e) => {
|
||||||
|
e.preventDefault();
|
||||||
|
this.dismissBanner(CACHE_HEALTH_BANNER_ID);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle rebuild cache action from banner
|
||||||
|
* @param {HTMLElement} bannerElement - The banner element
|
||||||
|
* @param {string} pageType - The page type (loras, checkpoints, embeddings)
|
||||||
|
*/
|
||||||
|
async handleRebuildCache(bannerElement, pageType) {
|
||||||
|
const currentPageType = pageType || this.getCurrentPageType();
|
||||||
|
|
||||||
|
try {
|
||||||
|
const apiClient = getModelApiClient(currentPageType);
|
||||||
|
|
||||||
|
// Update banner to show rebuilding status
|
||||||
|
const actionsContainer = bannerElement.querySelector('.banner-actions');
|
||||||
|
if (actionsContainer) {
|
||||||
|
actionsContainer.innerHTML = `
|
||||||
|
<span class="banner-loading">
|
||||||
|
<i class="fas fa-spinner fa-spin"></i>
|
||||||
|
<span>${translate('banners.cacheHealth.rebuilding', {}, 'Rebuilding cache...')}</span>
|
||||||
|
</span>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
await apiClient.refreshModels(true);
|
||||||
|
|
||||||
|
// Remove banner on success without marking as dismissed
|
||||||
|
this.removeBannerElement(CACHE_HEALTH_BANNER_ID);
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Cache rebuild failed:', error);
|
||||||
|
|
||||||
|
const actionsContainer = bannerElement.querySelector('.banner-actions');
|
||||||
|
if (actionsContainer) {
|
||||||
|
actionsContainer.innerHTML = `
|
||||||
|
<span class="banner-error">
|
||||||
|
<i class="fas fa-exclamation-triangle"></i>
|
||||||
|
<span>${translate('banners.cacheHealth.rebuildFailed', {}, 'Rebuild failed. Please try again.')}</span>
|
||||||
|
</span>
|
||||||
|
<a href="#" class="banner-action banner-action-primary" data-action="rebuild-cache">
|
||||||
|
<i class="fas fa-sync-alt"></i>
|
||||||
|
<span>${translate('banners.cacheHealth.retry', {}, 'Retry')}</span>
|
||||||
|
</a>
|
||||||
|
`;
|
||||||
|
|
||||||
|
// Re-attach click handler
|
||||||
|
const retryBtn = actionsContainer.querySelector('[data-action="rebuild-cache"]');
|
||||||
|
if (retryBtn) {
|
||||||
|
retryBtn.addEventListener('click', (e) => {
|
||||||
|
e.preventDefault();
|
||||||
|
this.handleRebuildCache(bannerElement, pageType);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the current page type from the URL
|
||||||
|
* @returns {string} Page type (loras, checkpoints, embeddings, recipes)
|
||||||
|
*/
|
||||||
|
getCurrentPageType() {
|
||||||
|
const path = window.location.pathname;
|
||||||
|
if (path.includes('/checkpoints')) return 'checkpoints';
|
||||||
|
if (path.includes('/embeddings')) return 'embeddings';
|
||||||
|
if (path.includes('/recipes')) return 'recipes';
|
||||||
|
return 'loras';
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the rebuild cache endpoint for the given page type
|
||||||
|
* @param {string} pageType - The page type
|
||||||
|
* @returns {string} The API endpoint URL
|
||||||
|
*/
|
||||||
|
getRebuildEndpoint(pageType) {
|
||||||
|
const endpoints = {
|
||||||
|
'loras': '/api/lm/loras/reload?rebuild=true',
|
||||||
|
'checkpoints': '/api/lm/checkpoints/reload?rebuild=true',
|
||||||
|
'embeddings': '/api/lm/embeddings/reload?rebuild=true'
|
||||||
|
};
|
||||||
|
return endpoints[pageType] || endpoints['loras'];
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove a banner element from DOM without marking as dismissed
|
||||||
|
* @param {string} bannerId - Banner ID to remove
|
||||||
|
*/
|
||||||
|
removeBannerElement(bannerId) {
|
||||||
|
const bannerElement = document.querySelector(`[data-banner-id="${bannerId}"]`);
|
||||||
|
if (bannerElement) {
|
||||||
|
bannerElement.style.animation = 'banner-slide-up 0.3s ease-in-out forwards';
|
||||||
|
setTimeout(() => {
|
||||||
|
bannerElement.remove();
|
||||||
|
this.updateContainerVisibility();
|
||||||
|
}, 300);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Also remove from banners map
|
||||||
|
this.banners.delete(bannerId);
|
||||||
|
}
|
||||||
|
|
||||||
prepareCommunitySupportBanner() {
|
prepareCommunitySupportBanner() {
|
||||||
if (this.isBannerDismissed(COMMUNITY_SUPPORT_BANNER_ID)) {
|
if (this.isBannerDismissed(COMMUNITY_SUPPORT_BANNER_ID)) {
|
||||||
return;
|
return;
|
||||||
|
|||||||
@@ -21,7 +21,7 @@ export class ExampleImagesManager {
|
|||||||
// Auto download properties
|
// Auto download properties
|
||||||
this.autoDownloadInterval = null;
|
this.autoDownloadInterval = null;
|
||||||
this.lastAutoDownloadCheck = 0;
|
this.lastAutoDownloadCheck = 0;
|
||||||
this.autoDownloadCheckInterval = 10 * 60 * 1000; // 10 minutes in milliseconds
|
this.autoDownloadCheckInterval = 30 * 60 * 1000; // 30 minutes in milliseconds
|
||||||
this.pageInitTime = Date.now(); // Track when page was initialized
|
this.pageInitTime = Date.now(); // Track when page was initialized
|
||||||
|
|
||||||
// Initialize download path field and check download status
|
// Initialize download path field and check download status
|
||||||
@@ -808,19 +808,58 @@ export class ExampleImagesManager {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
this.lastAutoDownloadCheck = now;
|
|
||||||
|
|
||||||
if (!this.canAutoDownload()) {
|
if (!this.canAutoDownload()) {
|
||||||
console.log('Auto download conditions not met, skipping check');
|
console.log('Auto download conditions not met, skipping check');
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
console.log('Performing auto download check...');
|
console.log('Performing auto download pre-check...');
|
||||||
|
|
||||||
|
// Step 1: Lightweight pre-check to see if any work is needed
|
||||||
|
const checkResponse = await fetch('/api/lm/check-example-images-needed', {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json'
|
||||||
|
},
|
||||||
|
body: JSON.stringify({
|
||||||
|
model_types: ['lora', 'checkpoint', 'embedding']
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!checkResponse.ok) {
|
||||||
|
console.warn('Auto download pre-check HTTP error:', checkResponse.status);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const checkData = await checkResponse.json();
|
||||||
|
|
||||||
|
if (!checkData.success) {
|
||||||
|
console.warn('Auto download pre-check failed:', checkData.error);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update the check timestamp only after successful pre-check
|
||||||
|
this.lastAutoDownloadCheck = now;
|
||||||
|
|
||||||
|
// If download already in progress, skip
|
||||||
|
if (checkData.is_downloading) {
|
||||||
|
console.log('Download already in progress, skipping auto check');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// If no models need downloading, skip
|
||||||
|
if (!checkData.needs_download || checkData.pending_count === 0) {
|
||||||
|
console.log(`Auto download pre-check complete: ${checkData.processed_count}/${checkData.total_models} models already processed, no work needed`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`Auto download pre-check: ${checkData.pending_count} models need processing, starting download...`);
|
||||||
|
|
||||||
|
// Step 2: Start the actual download (fire-and-forget)
|
||||||
const optimize = state.global.settings.optimize_example_images;
|
const optimize = state.global.settings.optimize_example_images;
|
||||||
|
|
||||||
const response = await fetch('/api/lm/download-example-images', {
|
fetch('/api/lm/download-example-images', {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: {
|
headers: {
|
||||||
'Content-Type': 'application/json'
|
'Content-Type': 'application/json'
|
||||||
@@ -830,18 +869,29 @@ export class ExampleImagesManager {
|
|||||||
model_types: ['lora', 'checkpoint', 'embedding'],
|
model_types: ['lora', 'checkpoint', 'embedding'],
|
||||||
auto_mode: true // Flag to indicate this is an automatic download
|
auto_mode: true // Flag to indicate this is an automatic download
|
||||||
})
|
})
|
||||||
|
}).then(response => {
|
||||||
|
if (!response.ok) {
|
||||||
|
console.warn('Auto download start HTTP error:', response.status);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
return response.json();
|
||||||
|
}).then(data => {
|
||||||
|
if (data && !data.success) {
|
||||||
|
console.warn('Auto download start failed:', data.error);
|
||||||
|
// If already in progress, push back the next check to avoid hammering the API
|
||||||
|
if (data.error && data.error.includes('already in progress')) {
|
||||||
|
console.log('Download already in progress, backing off next check');
|
||||||
|
this.lastAutoDownloadCheck = now + (5 * 60 * 1000); // Back off for 5 extra minutes
|
||||||
|
}
|
||||||
|
} else if (data && data.success) {
|
||||||
|
console.log('Auto download started:', data.message || 'Download started');
|
||||||
|
}
|
||||||
|
}).catch(error => {
|
||||||
|
console.error('Auto download start error:', error);
|
||||||
});
|
});
|
||||||
|
|
||||||
const data = await response.json();
|
// Immediately return without waiting for the download fetch to complete
|
||||||
|
// This keeps the UI responsive
|
||||||
if (!data.success) {
|
|
||||||
console.warn('Auto download check failed:', data.error);
|
|
||||||
// If already in progress, push back the next check to avoid hammering the API
|
|
||||||
if (data.error && data.error.includes('already in progress')) {
|
|
||||||
console.log('Download already in progress, backing off next check');
|
|
||||||
this.lastAutoDownloadCheck = now + (5 * 60 * 1000); // Back off for 5 extra minutes
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('Auto download check error:', error);
|
console.error('Auto download check error:', error);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -63,6 +63,9 @@ export class FilterManager {
|
|||||||
this.initializeLicenseFilters();
|
this.initializeLicenseFilters();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Initialize tag logic toggle
|
||||||
|
this.initializeTagLogicToggle();
|
||||||
|
|
||||||
// Add click handler for filter button
|
// Add click handler for filter button
|
||||||
if (this.filterButton) {
|
if (this.filterButton) {
|
||||||
this.filterButton.addEventListener('click', () => {
|
this.filterButton.addEventListener('click', () => {
|
||||||
@@ -84,6 +87,45 @@ export class FilterManager {
|
|||||||
this.loadFiltersFromStorage();
|
this.loadFiltersFromStorage();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
initializeTagLogicToggle() {
|
||||||
|
const toggleContainer = document.getElementById('tagLogicToggle');
|
||||||
|
if (!toggleContainer) return;
|
||||||
|
|
||||||
|
const options = toggleContainer.querySelectorAll('.tag-logic-option');
|
||||||
|
|
||||||
|
options.forEach(option => {
|
||||||
|
option.addEventListener('click', async () => {
|
||||||
|
const value = option.dataset.value;
|
||||||
|
if (this.filters.tagLogic === value) return;
|
||||||
|
|
||||||
|
this.filters.tagLogic = value;
|
||||||
|
this.updateTagLogicToggleUI();
|
||||||
|
|
||||||
|
// Auto-apply filter when logic changes
|
||||||
|
await this.applyFilters(false);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// Set initial state
|
||||||
|
this.updateTagLogicToggleUI();
|
||||||
|
}
|
||||||
|
|
||||||
|
updateTagLogicToggleUI() {
|
||||||
|
const toggleContainer = document.getElementById('tagLogicToggle');
|
||||||
|
if (!toggleContainer) return;
|
||||||
|
|
||||||
|
const options = toggleContainer.querySelectorAll('.tag-logic-option');
|
||||||
|
const currentLogic = this.filters.tagLogic || 'any';
|
||||||
|
|
||||||
|
options.forEach(option => {
|
||||||
|
if (option.dataset.value === currentLogic) {
|
||||||
|
option.classList.add('active');
|
||||||
|
} else {
|
||||||
|
option.classList.remove('active');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
async loadTopTags() {
|
async loadTopTags() {
|
||||||
try {
|
try {
|
||||||
// Show loading state
|
// Show loading state
|
||||||
@@ -549,6 +591,17 @@ export class FilterManager {
|
|||||||
showToast('toast.filters.cleared', {}, 'info');
|
showToast('toast.filters.cleared', {}, 'info');
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Refresh duplicates with new filters
|
||||||
|
if (window.modelDuplicatesManager) {
|
||||||
|
if (window.modelDuplicatesManager.inDuplicateMode) {
|
||||||
|
// In duplicate mode: refresh the duplicate list
|
||||||
|
await window.modelDuplicatesManager.findDuplicates();
|
||||||
|
} else {
|
||||||
|
// Not in duplicate mode: just update badge count
|
||||||
|
window.modelDuplicatesManager.checkDuplicatesCount();
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async clearFilters() {
|
async clearFilters() {
|
||||||
@@ -562,9 +615,13 @@ export class FilterManager {
|
|||||||
baseModel: [],
|
baseModel: [],
|
||||||
tags: {},
|
tags: {},
|
||||||
license: {},
|
license: {},
|
||||||
modelTypes: []
|
modelTypes: [],
|
||||||
|
tagLogic: 'any'
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Update tag logic toggle UI
|
||||||
|
this.updateTagLogicToggleUI();
|
||||||
|
|
||||||
// Update state
|
// Update state
|
||||||
const pageState = getCurrentPageState();
|
const pageState = getCurrentPageState();
|
||||||
pageState.filters = this.cloneFilters();
|
pageState.filters = this.cloneFilters();
|
||||||
@@ -609,6 +666,7 @@ export class FilterManager {
|
|||||||
pageState.filters = this.cloneFilters();
|
pageState.filters = this.cloneFilters();
|
||||||
|
|
||||||
this.updateTagSelections();
|
this.updateTagSelections();
|
||||||
|
this.updateTagLogicToggleUI();
|
||||||
this.updateActiveFiltersCount();
|
this.updateActiveFiltersCount();
|
||||||
|
|
||||||
if (this.hasActiveFilters()) {
|
if (this.hasActiveFilters()) {
|
||||||
@@ -644,7 +702,8 @@ export class FilterManager {
|
|||||||
baseModel: Array.isArray(source.baseModel) ? [...source.baseModel] : [],
|
baseModel: Array.isArray(source.baseModel) ? [...source.baseModel] : [],
|
||||||
tags: this.normalizeTagFilters(source.tags),
|
tags: this.normalizeTagFilters(source.tags),
|
||||||
license: this.shouldShowLicenseFilters() ? this.normalizeLicenseFilters(source.license) : {},
|
license: this.shouldShowLicenseFilters() ? this.normalizeLicenseFilters(source.license) : {},
|
||||||
modelTypes: this.normalizeModelTypeFilters(source.modelTypes)
|
modelTypes: this.normalizeModelTypeFilters(source.modelTypes),
|
||||||
|
tagLogic: source.tagLogic || 'any'
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -726,7 +785,8 @@ export class FilterManager {
|
|||||||
baseModel: [...(this.filters.baseModel || [])],
|
baseModel: [...(this.filters.baseModel || [])],
|
||||||
tags: { ...(this.filters.tags || {}) },
|
tags: { ...(this.filters.tags || {}) },
|
||||||
license: { ...(this.filters.license || {}) },
|
license: { ...(this.filters.license || {}) },
|
||||||
modelTypes: [...(this.filters.modelTypes || [])]
|
modelTypes: [...(this.filters.modelTypes || [])],
|
||||||
|
tagLogic: this.filters.tagLogic || 'any'
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -751,12 +751,7 @@ export class FilterPresetManager {
|
|||||||
|
|
||||||
const presetName = document.createElement('span');
|
const presetName = document.createElement('span');
|
||||||
presetName.className = 'preset-name';
|
presetName.className = 'preset-name';
|
||||||
|
presetName.textContent = preset.name;
|
||||||
if (isActive) {
|
|
||||||
presetName.innerHTML = `<i class="fas fa-check"></i> ${preset.name}`;
|
|
||||||
} else {
|
|
||||||
presetName.textContent = preset.name;
|
|
||||||
}
|
|
||||||
presetName.title = translate('header.filter.presetClickTooltip', { name: preset.name }, `Click to apply preset "${preset.name}"`);
|
presetName.title = translate('header.filter.presetClickTooltip', { name: preset.name }, `Click to apply preset "${preset.name}"`);
|
||||||
|
|
||||||
const deleteBtn = document.createElement('button');
|
const deleteBtn = document.createElement('button');
|
||||||
|
|||||||
@@ -27,6 +27,10 @@ export const BASE_MODELS = {
|
|||||||
FLUX_1_KREA: "Flux.1 Krea",
|
FLUX_1_KREA: "Flux.1 Krea",
|
||||||
FLUX_1_KONTEXT: "Flux.1 Kontext",
|
FLUX_1_KONTEXT: "Flux.1 Kontext",
|
||||||
FLUX_2_D: "Flux.2 D",
|
FLUX_2_D: "Flux.2 D",
|
||||||
|
FLUX_2_KLEIN_9B: "Flux.2 Klein 9B",
|
||||||
|
FLUX_2_KLEIN_9B_BASE: "Flux.2 Klein 9B-base",
|
||||||
|
FLUX_2_KLEIN_4B: "Flux.2 Klein 4B",
|
||||||
|
FLUX_2_KLEIN_4B_BASE: "Flux.2 Klein 4B-base",
|
||||||
AURAFLOW: "AuraFlow",
|
AURAFLOW: "AuraFlow",
|
||||||
CHROMA: "Chroma",
|
CHROMA: "Chroma",
|
||||||
PIXART_A: "PixArt a",
|
PIXART_A: "PixArt a",
|
||||||
@@ -40,10 +44,12 @@ export const BASE_MODELS = {
|
|||||||
HIDREAM: "HiDream",
|
HIDREAM: "HiDream",
|
||||||
QWEN: "Qwen",
|
QWEN: "Qwen",
|
||||||
ZIMAGE_TURBO: "ZImageTurbo",
|
ZIMAGE_TURBO: "ZImageTurbo",
|
||||||
|
ZIMAGE_BASE: "ZImageBase",
|
||||||
|
|
||||||
// Video models
|
// Video models
|
||||||
SVD: "SVD",
|
SVD: "SVD",
|
||||||
LTXV: "LTXV",
|
LTXV: "LTXV",
|
||||||
|
LTXV2: "LTXV2",
|
||||||
WAN_VIDEO: "Wan Video",
|
WAN_VIDEO: "Wan Video",
|
||||||
WAN_VIDEO_1_3B_T2V: "Wan Video 1.3B t2v",
|
WAN_VIDEO_1_3B_T2V: "Wan Video 1.3B t2v",
|
||||||
WAN_VIDEO_14B_T2V: "Wan Video 14B t2v",
|
WAN_VIDEO_14B_T2V: "Wan Video 14B t2v",
|
||||||
@@ -120,6 +126,10 @@ export const BASE_MODEL_ABBREVIATIONS = {
|
|||||||
[BASE_MODELS.FLUX_1_KREA]: 'F1KR',
|
[BASE_MODELS.FLUX_1_KREA]: 'F1KR',
|
||||||
[BASE_MODELS.FLUX_1_KONTEXT]: 'F1KX',
|
[BASE_MODELS.FLUX_1_KONTEXT]: 'F1KX',
|
||||||
[BASE_MODELS.FLUX_2_D]: 'F2D',
|
[BASE_MODELS.FLUX_2_D]: 'F2D',
|
||||||
|
[BASE_MODELS.FLUX_2_KLEIN_9B]: 'FK9',
|
||||||
|
[BASE_MODELS.FLUX_2_KLEIN_9B_BASE]: 'FK9B',
|
||||||
|
[BASE_MODELS.FLUX_2_KLEIN_4B]: 'FK4',
|
||||||
|
[BASE_MODELS.FLUX_2_KLEIN_4B_BASE]: 'FK4B',
|
||||||
|
|
||||||
// Other diffusion models
|
// Other diffusion models
|
||||||
[BASE_MODELS.AURAFLOW]: 'AF',
|
[BASE_MODELS.AURAFLOW]: 'AF',
|
||||||
@@ -135,10 +145,12 @@ export const BASE_MODEL_ABBREVIATIONS = {
|
|||||||
[BASE_MODELS.HIDREAM]: 'HID',
|
[BASE_MODELS.HIDREAM]: 'HID',
|
||||||
[BASE_MODELS.QWEN]: 'QWEN',
|
[BASE_MODELS.QWEN]: 'QWEN',
|
||||||
[BASE_MODELS.ZIMAGE_TURBO]: 'ZIT',
|
[BASE_MODELS.ZIMAGE_TURBO]: 'ZIT',
|
||||||
|
[BASE_MODELS.ZIMAGE_BASE]: 'ZIB',
|
||||||
|
|
||||||
// Video models
|
// Video models
|
||||||
[BASE_MODELS.SVD]: 'SVD',
|
[BASE_MODELS.SVD]: 'SVD',
|
||||||
[BASE_MODELS.LTXV]: 'LTXV',
|
[BASE_MODELS.LTXV]: 'LTXV',
|
||||||
|
[BASE_MODELS.LTXV2]: 'LTV2',
|
||||||
[BASE_MODELS.WAN_VIDEO]: 'WAN',
|
[BASE_MODELS.WAN_VIDEO]: 'WAN',
|
||||||
[BASE_MODELS.WAN_VIDEO_1_3B_T2V]: 'WAN',
|
[BASE_MODELS.WAN_VIDEO_1_3B_T2V]: 'WAN',
|
||||||
[BASE_MODELS.WAN_VIDEO_14B_T2V]: 'WAN',
|
[BASE_MODELS.WAN_VIDEO_14B_T2V]: 'WAN',
|
||||||
@@ -328,16 +340,16 @@ export const BASE_MODEL_CATEGORIES = {
|
|||||||
'Stable Diffusion 3.x': [BASE_MODELS.SD_3, BASE_MODELS.SD_3_5, BASE_MODELS.SD_3_5_MEDIUM, BASE_MODELS.SD_3_5_LARGE, BASE_MODELS.SD_3_5_LARGE_TURBO],
|
'Stable Diffusion 3.x': [BASE_MODELS.SD_3, BASE_MODELS.SD_3_5, BASE_MODELS.SD_3_5_MEDIUM, BASE_MODELS.SD_3_5_LARGE, BASE_MODELS.SD_3_5_LARGE_TURBO],
|
||||||
'SDXL': [BASE_MODELS.SDXL, BASE_MODELS.SDXL_LIGHTNING, BASE_MODELS.SDXL_HYPER],
|
'SDXL': [BASE_MODELS.SDXL, BASE_MODELS.SDXL_LIGHTNING, BASE_MODELS.SDXL_HYPER],
|
||||||
'Video Models': [
|
'Video Models': [
|
||||||
BASE_MODELS.SVD, BASE_MODELS.LTXV, BASE_MODELS.HUNYUAN_VIDEO, BASE_MODELS.WAN_VIDEO,
|
BASE_MODELS.SVD, BASE_MODELS.LTXV, BASE_MODELS.LTXV2, BASE_MODELS.HUNYUAN_VIDEO, BASE_MODELS.WAN_VIDEO,
|
||||||
BASE_MODELS.WAN_VIDEO_1_3B_T2V, BASE_MODELS.WAN_VIDEO_14B_T2V,
|
BASE_MODELS.WAN_VIDEO_1_3B_T2V, BASE_MODELS.WAN_VIDEO_14B_T2V,
|
||||||
BASE_MODELS.WAN_VIDEO_14B_I2V_480P, BASE_MODELS.WAN_VIDEO_14B_I2V_720P,
|
BASE_MODELS.WAN_VIDEO_14B_I2V_480P, BASE_MODELS.WAN_VIDEO_14B_I2V_720P,
|
||||||
BASE_MODELS.WAN_VIDEO_2_2_TI2V_5B, BASE_MODELS.WAN_VIDEO_2_2_T2V_A14B,
|
BASE_MODELS.WAN_VIDEO_2_2_TI2V_5B, BASE_MODELS.WAN_VIDEO_2_2_T2V_A14B,
|
||||||
BASE_MODELS.WAN_VIDEO_2_2_I2V_A14B
|
BASE_MODELS.WAN_VIDEO_2_2_I2V_A14B
|
||||||
],
|
],
|
||||||
'Flux Models': [BASE_MODELS.FLUX_1_D, BASE_MODELS.FLUX_1_S, BASE_MODELS.FLUX_1_KONTEXT, BASE_MODELS.FLUX_1_KREA, BASE_MODELS.FLUX_2_D],
|
'Flux Models': [BASE_MODELS.FLUX_1_D, BASE_MODELS.FLUX_1_S, BASE_MODELS.FLUX_1_KONTEXT, BASE_MODELS.FLUX_1_KREA, BASE_MODELS.FLUX_2_D, BASE_MODELS.FLUX_2_KLEIN_9B, BASE_MODELS.FLUX_2_KLEIN_9B_BASE, BASE_MODELS.FLUX_2_KLEIN_4B, BASE_MODELS.FLUX_2_KLEIN_4B_BASE],
|
||||||
'Other Models': [
|
'Other Models': [
|
||||||
BASE_MODELS.ILLUSTRIOUS, BASE_MODELS.PONY, BASE_MODELS.HIDREAM,
|
BASE_MODELS.ILLUSTRIOUS, BASE_MODELS.PONY, BASE_MODELS.HIDREAM,
|
||||||
BASE_MODELS.QWEN, BASE_MODELS.AURAFLOW, BASE_MODELS.CHROMA, BASE_MODELS.ZIMAGE_TURBO,
|
BASE_MODELS.QWEN, BASE_MODELS.AURAFLOW, BASE_MODELS.CHROMA, BASE_MODELS.ZIMAGE_TURBO, BASE_MODELS.ZIMAGE_BASE,
|
||||||
BASE_MODELS.PIXART_A, BASE_MODELS.PIXART_E, BASE_MODELS.HUNYUAN_1,
|
BASE_MODELS.PIXART_A, BASE_MODELS.PIXART_E, BASE_MODELS.HUNYUAN_1,
|
||||||
BASE_MODELS.LUMINA, BASE_MODELS.KOLORS, BASE_MODELS.NOOBAI,
|
BASE_MODELS.LUMINA, BASE_MODELS.KOLORS, BASE_MODELS.NOOBAI,
|
||||||
BASE_MODELS.UNKNOWN
|
BASE_MODELS.UNKNOWN
|
||||||
|
|||||||
@@ -150,7 +150,13 @@
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<div class="filter-section">
|
<div class="filter-section">
|
||||||
<h4>{{ t('header.filter.modelTags') }}</h4>
|
<div class="filter-section-header">
|
||||||
|
<h4>{{ t('header.filter.modelTags') }}</h4>
|
||||||
|
<div class="tag-logic-toggle" id="tagLogicToggle">
|
||||||
|
<button class="tag-logic-option" data-value="any" title="{{ t('header.filter.tagLogicAny') }}">{{ t('header.filter.any') }}</button>
|
||||||
|
<button class="tag-logic-option" data-value="all" title="{{ t('header.filter.tagLogicAll') }}">{{ t('header.filter.all') }}</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
<div class="filter-tags" id="modelTagsFilter">
|
<div class="filter-tags" id="modelTagsFilter">
|
||||||
<!-- Top tags will be dynamically inserted here -->
|
<!-- Top tags will be dynamically inserted here -->
|
||||||
<div class="tags-loading">{{ t('common.status.loading') }}</div>
|
<div class="tags-loading">{{ t('common.status.loading') }}</div>
|
||||||
|
|||||||
160
tests/config/test_checkpoint_path_overlap.py
Normal file
160
tests/config/test_checkpoint_path_overlap.py
Normal file
@@ -0,0 +1,160 @@
|
|||||||
|
"""Tests for checkpoint path overlap detection."""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from py import config as config_module
|
||||||
|
|
||||||
|
|
||||||
|
def _normalize(path: str) -> str:
|
||||||
|
return os.path.normpath(path).replace(os.sep, "/")
|
||||||
|
|
||||||
|
|
||||||
|
class TestCheckpointPathOverlap:
|
||||||
|
"""Test detection of overlapping paths between checkpoints and unet."""
|
||||||
|
|
||||||
|
def test_overlapping_paths_prioritizes_checkpoints(
|
||||||
|
self, monkeypatch: pytest.MonkeyPatch, tmp_path, caplog
|
||||||
|
):
|
||||||
|
"""Test that overlapping paths prioritize checkpoints for backward compatibility."""
|
||||||
|
# Create a shared physical folder
|
||||||
|
shared_dir = tmp_path / "shared_models"
|
||||||
|
shared_dir.mkdir()
|
||||||
|
|
||||||
|
# Create two symlinks pointing to the same physical folder
|
||||||
|
checkpoints_link = tmp_path / "checkpoints"
|
||||||
|
unet_link = tmp_path / "unet"
|
||||||
|
checkpoints_link.symlink_to(shared_dir, target_is_directory=True)
|
||||||
|
unet_link.symlink_to(shared_dir, target_is_directory=True)
|
||||||
|
|
||||||
|
# Create Config instance with overlapping paths
|
||||||
|
with caplog.at_level(logging.WARNING, logger=config_module.logger.name):
|
||||||
|
config = config_module.Config.__new__(config_module.Config)
|
||||||
|
config._path_mappings = {}
|
||||||
|
config._preview_root_paths = set()
|
||||||
|
config._cached_fingerprint = None
|
||||||
|
|
||||||
|
# Call the method under test
|
||||||
|
result = config._prepare_checkpoint_paths(
|
||||||
|
[str(checkpoints_link)], [str(unet_link)]
|
||||||
|
)
|
||||||
|
|
||||||
|
# Verify warning was logged
|
||||||
|
warning_messages = [
|
||||||
|
record.message
|
||||||
|
for record in caplog.records
|
||||||
|
if record.levelname == "WARNING"
|
||||||
|
and "overlapping paths" in record.message.lower()
|
||||||
|
]
|
||||||
|
assert len(warning_messages) == 1
|
||||||
|
assert "checkpoints" in warning_messages[0].lower()
|
||||||
|
assert "diffusion_models" in warning_messages[0].lower() or "unet" in warning_messages[0].lower()
|
||||||
|
# Verify warning mentions backward compatibility fallback
|
||||||
|
assert "falling back" in warning_messages[0].lower() or "backward compatibility" in warning_messages[0].lower()
|
||||||
|
|
||||||
|
# Verify only one path is returned (deduplication still works)
|
||||||
|
assert len(result) == 1
|
||||||
|
# Prioritizes checkpoints path for backward compatibility
|
||||||
|
assert _normalize(result[0]) == _normalize(str(checkpoints_link))
|
||||||
|
|
||||||
|
# Verify checkpoints_roots has the path (prioritized)
|
||||||
|
assert len(config.checkpoints_roots) == 1
|
||||||
|
assert _normalize(config.checkpoints_roots[0]) == _normalize(str(checkpoints_link))
|
||||||
|
|
||||||
|
# Verify unet_roots is empty (overlapping paths removed)
|
||||||
|
assert config.unet_roots == []
|
||||||
|
|
||||||
|
def test_non_overlapping_paths_no_warning(
|
||||||
|
self, monkeypatch: pytest.MonkeyPatch, tmp_path, caplog
|
||||||
|
):
|
||||||
|
"""Test that non-overlapping paths do not trigger a warning."""
|
||||||
|
# Create separate physical folders
|
||||||
|
checkpoints_dir = tmp_path / "checkpoints"
|
||||||
|
checkpoints_dir.mkdir()
|
||||||
|
unet_dir = tmp_path / "unet"
|
||||||
|
unet_dir.mkdir()
|
||||||
|
|
||||||
|
# Create Config instance with separate paths
|
||||||
|
with caplog.at_level(logging.WARNING, logger=config_module.logger.name):
|
||||||
|
config = config_module.Config.__new__(config_module.Config)
|
||||||
|
config._path_mappings = {}
|
||||||
|
config._preview_root_paths = set()
|
||||||
|
config._cached_fingerprint = None
|
||||||
|
|
||||||
|
result = config._prepare_checkpoint_paths(
|
||||||
|
[str(checkpoints_dir)], [str(unet_dir)]
|
||||||
|
)
|
||||||
|
|
||||||
|
# Verify no overlapping paths warning was logged
|
||||||
|
warning_messages = [
|
||||||
|
record.message
|
||||||
|
for record in caplog.records
|
||||||
|
if record.levelname == "WARNING"
|
||||||
|
and "overlapping paths" in record.message.lower()
|
||||||
|
]
|
||||||
|
assert len(warning_messages) == 0
|
||||||
|
|
||||||
|
# Verify both paths are returned
|
||||||
|
assert len(result) == 2
|
||||||
|
normalized_result = [_normalize(p) for p in result]
|
||||||
|
assert _normalize(str(checkpoints_dir)) in normalized_result
|
||||||
|
assert _normalize(str(unet_dir)) in normalized_result
|
||||||
|
|
||||||
|
# Verify both roots are properly set
|
||||||
|
assert len(config.checkpoints_roots) == 1
|
||||||
|
assert len(config.unet_roots) == 1
|
||||||
|
|
||||||
|
def test_partial_overlap_prioritizes_checkpoints(
|
||||||
|
self, monkeypatch: pytest.MonkeyPatch, tmp_path, caplog
|
||||||
|
):
|
||||||
|
"""Test partial overlap - overlapping paths prioritize checkpoints."""
|
||||||
|
# Create folders
|
||||||
|
shared_dir = tmp_path / "shared"
|
||||||
|
shared_dir.mkdir()
|
||||||
|
separate_checkpoint = tmp_path / "separate_ckpt"
|
||||||
|
separate_checkpoint.mkdir()
|
||||||
|
separate_unet = tmp_path / "separate_unet"
|
||||||
|
separate_unet.mkdir()
|
||||||
|
|
||||||
|
# Create symlinks - one shared, others separate
|
||||||
|
shared_link = tmp_path / "shared_link"
|
||||||
|
shared_link.symlink_to(shared_dir, target_is_directory=True)
|
||||||
|
|
||||||
|
with caplog.at_level(logging.WARNING, logger=config_module.logger.name):
|
||||||
|
config = config_module.Config.__new__(config_module.Config)
|
||||||
|
config._path_mappings = {}
|
||||||
|
config._preview_root_paths = set()
|
||||||
|
config._cached_fingerprint = None
|
||||||
|
|
||||||
|
# One checkpoint path overlaps with one unet path
|
||||||
|
result = config._prepare_checkpoint_paths(
|
||||||
|
[str(shared_link), str(separate_checkpoint)],
|
||||||
|
[str(shared_link), str(separate_unet)]
|
||||||
|
)
|
||||||
|
|
||||||
|
# Verify warning was logged for the overlapping path
|
||||||
|
warning_messages = [
|
||||||
|
record.message
|
||||||
|
for record in caplog.records
|
||||||
|
if record.levelname == "WARNING"
|
||||||
|
and "overlapping paths" in record.message.lower()
|
||||||
|
]
|
||||||
|
assert len(warning_messages) == 1
|
||||||
|
|
||||||
|
# Verify 3 unique paths (shared counted once as checkpoint, plus separate ones)
|
||||||
|
assert len(result) == 3
|
||||||
|
|
||||||
|
# Verify the overlapping path appears in warning message
|
||||||
|
assert str(shared_link.name) in warning_messages[0] or str(shared_dir.name) in warning_messages[0]
|
||||||
|
|
||||||
|
# Verify checkpoints_roots includes both checkpoint paths (including the shared one)
|
||||||
|
assert len(config.checkpoints_roots) == 2
|
||||||
|
checkpoint_normalized = [_normalize(p) for p in config.checkpoints_roots]
|
||||||
|
assert _normalize(str(shared_link)) in checkpoint_normalized
|
||||||
|
assert _normalize(str(separate_checkpoint)) in checkpoint_normalized
|
||||||
|
|
||||||
|
# Verify unet_roots only includes the non-overlapping unet path
|
||||||
|
assert len(config.unet_roots) == 1
|
||||||
|
assert _normalize(config.unet_roots[0]) == _normalize(str(separate_unet))
|
||||||
@@ -230,8 +230,58 @@ def test_new_symlink_triggers_rescan(monkeypatch: pytest.MonkeyPatch, tmp_path):
|
|||||||
assert normalized_external in second_cfg._path_mappings
|
assert normalized_external in second_cfg._path_mappings
|
||||||
|
|
||||||
|
|
||||||
def test_removed_deep_symlink_triggers_rescan(monkeypatch: pytest.MonkeyPatch, tmp_path):
|
def test_removed_first_level_symlink_triggers_rescan(monkeypatch: pytest.MonkeyPatch, tmp_path):
|
||||||
"""Removing a deep symlink should trigger cache invalidation."""
|
"""Removing a first-level symlink should trigger cache invalidation."""
|
||||||
|
loras_dir, settings_dir = _setup_paths(monkeypatch, tmp_path)
|
||||||
|
|
||||||
|
# Create first-level symlink (directly under loras root)
|
||||||
|
external_dir = tmp_path / "external"
|
||||||
|
external_dir.mkdir()
|
||||||
|
symlink = loras_dir / "external_models"
|
||||||
|
symlink.symlink_to(external_dir, target_is_directory=True)
|
||||||
|
|
||||||
|
# Initial scan finds the symlink
|
||||||
|
first_cfg = config_module.Config()
|
||||||
|
normalized_external = _normalize(str(external_dir))
|
||||||
|
assert normalized_external in first_cfg._path_mappings
|
||||||
|
|
||||||
|
# Remove the symlink
|
||||||
|
symlink.unlink()
|
||||||
|
|
||||||
|
# Second config should detect invalid cached mapping and rescan
|
||||||
|
second_cfg = config_module.Config()
|
||||||
|
assert normalized_external not in second_cfg._path_mappings
|
||||||
|
|
||||||
|
|
||||||
|
def test_retargeted_first_level_symlink_triggers_rescan(monkeypatch: pytest.MonkeyPatch, tmp_path):
|
||||||
|
"""Changing a first-level symlink's target should trigger cache invalidation."""
|
||||||
|
loras_dir, settings_dir = _setup_paths(monkeypatch, tmp_path)
|
||||||
|
|
||||||
|
# Create first-level symlink
|
||||||
|
target_v1 = tmp_path / "external_v1"
|
||||||
|
target_v1.mkdir()
|
||||||
|
target_v2 = tmp_path / "external_v2"
|
||||||
|
target_v2.mkdir()
|
||||||
|
|
||||||
|
symlink = loras_dir / "external_models"
|
||||||
|
symlink.symlink_to(target_v1, target_is_directory=True)
|
||||||
|
|
||||||
|
# Initial scan
|
||||||
|
first_cfg = config_module.Config()
|
||||||
|
assert _normalize(str(target_v1)) in first_cfg._path_mappings
|
||||||
|
|
||||||
|
# Retarget the symlink
|
||||||
|
symlink.unlink()
|
||||||
|
symlink.symlink_to(target_v2, target_is_directory=True)
|
||||||
|
|
||||||
|
# Second config should detect changed target and rescan
|
||||||
|
second_cfg = config_module.Config()
|
||||||
|
assert _normalize(str(target_v2)) in second_cfg._path_mappings
|
||||||
|
assert _normalize(str(target_v1)) not in second_cfg._path_mappings
|
||||||
|
|
||||||
|
|
||||||
|
def test_deep_symlink_not_scanned(monkeypatch: pytest.MonkeyPatch, tmp_path):
|
||||||
|
"""Deep symlinks (below first level) are not scanned to avoid performance issues."""
|
||||||
loras_dir, settings_dir = _setup_paths(monkeypatch, tmp_path)
|
loras_dir, settings_dir = _setup_paths(monkeypatch, tmp_path)
|
||||||
|
|
||||||
# Create nested structure with deep symlink
|
# Create nested structure with deep symlink
|
||||||
@@ -242,46 +292,140 @@ def test_removed_deep_symlink_triggers_rescan(monkeypatch: pytest.MonkeyPatch, t
|
|||||||
deep_symlink = subdir / "styles"
|
deep_symlink = subdir / "styles"
|
||||||
deep_symlink.symlink_to(external_dir, target_is_directory=True)
|
deep_symlink.symlink_to(external_dir, target_is_directory=True)
|
||||||
|
|
||||||
# Initial scan finds the deep symlink
|
# Config should not detect deep symlinks (only first-level)
|
||||||
first_cfg = config_module.Config()
|
cfg = config_module.Config()
|
||||||
normalized_external = _normalize(str(external_dir))
|
normalized_external = _normalize(str(external_dir))
|
||||||
assert normalized_external in first_cfg._path_mappings
|
assert normalized_external not in cfg._path_mappings
|
||||||
|
|
||||||
# Remove the deep symlink
|
|
||||||
deep_symlink.unlink()
|
|
||||||
|
|
||||||
# Second config should detect invalid cached mapping and rescan
|
|
||||||
second_cfg = config_module.Config()
|
|
||||||
assert normalized_external not in second_cfg._path_mappings
|
|
||||||
|
|
||||||
|
|
||||||
def test_retargeted_deep_symlink_triggers_rescan(monkeypatch: pytest.MonkeyPatch, tmp_path):
|
def test_deep_symlink_discovered_on_preview_access(monkeypatch: pytest.MonkeyPatch, tmp_path):
|
||||||
"""Changing a deep symlink's target should trigger cache invalidation."""
|
"""Deep symlinks are discovered dynamically when preview is accessed."""
|
||||||
loras_dir, settings_dir = _setup_paths(monkeypatch, tmp_path)
|
loras_dir, settings_dir = _setup_paths(monkeypatch, tmp_path)
|
||||||
|
|
||||||
# Create nested structure
|
# Create nested structure with deep symlink at second level
|
||||||
subdir = loras_dir / "anime"
|
subdir = loras_dir / "anime"
|
||||||
subdir.mkdir()
|
subdir.mkdir()
|
||||||
target_v1 = tmp_path / "external_v1"
|
external_dir = tmp_path / "external"
|
||||||
target_v1.mkdir()
|
external_dir.mkdir()
|
||||||
target_v2 = tmp_path / "external_v2"
|
|
||||||
target_v2.mkdir()
|
|
||||||
|
|
||||||
deep_symlink = subdir / "styles"
|
deep_symlink = subdir / "styles"
|
||||||
deep_symlink.symlink_to(target_v1, target_is_directory=True)
|
deep_symlink.symlink_to(external_dir, target_is_directory=True)
|
||||||
|
|
||||||
# Initial scan
|
# Create preview file under deep symlink
|
||||||
first_cfg = config_module.Config()
|
preview_file = deep_symlink / "model.preview.jpeg"
|
||||||
assert _normalize(str(target_v1)) in first_cfg._path_mappings
|
preview_file.write_bytes(b"preview")
|
||||||
|
|
||||||
|
# Config should not initially detect deep symlinks
|
||||||
|
cfg = config_module.Config()
|
||||||
|
normalized_external = _normalize(str(external_dir))
|
||||||
|
normalized_deep_link = _normalize(str(deep_symlink))
|
||||||
|
assert normalized_external not in cfg._path_mappings
|
||||||
|
|
||||||
|
# First preview access triggers symlink discovery automatically and returns True
|
||||||
|
is_allowed = cfg.is_preview_path_allowed(str(preview_file))
|
||||||
|
|
||||||
|
# After discovery, preview should be allowed
|
||||||
|
assert is_allowed
|
||||||
|
assert normalized_external in cfg._path_mappings
|
||||||
|
assert cfg._path_mappings[normalized_external] == normalized_deep_link
|
||||||
|
|
||||||
|
# Verify preview path is now allowed without triggering discovery again
|
||||||
|
assert cfg.is_preview_path_allowed(str(preview_file))
|
||||||
|
|
||||||
|
|
||||||
|
def test_deep_symlink_at_third_level(monkeypatch: pytest.MonkeyPatch, tmp_path):
|
||||||
|
"""Deep symlinks at third level are also discovered dynamically."""
|
||||||
|
loras_dir, settings_dir = _setup_paths(monkeypatch, tmp_path)
|
||||||
|
|
||||||
|
# Create nested structure with deep symlink at third level
|
||||||
|
level1 = loras_dir / "category"
|
||||||
|
level1.mkdir()
|
||||||
|
level2 = level1 / "subcategory"
|
||||||
|
level2.mkdir()
|
||||||
|
external_dir = tmp_path / "external_deep"
|
||||||
|
external_dir.mkdir()
|
||||||
|
deep_symlink = level2 / "deep"
|
||||||
|
deep_symlink.symlink_to(external_dir, target_is_directory=True)
|
||||||
|
|
||||||
|
# Create preview file under deep symlink
|
||||||
|
preview_file = deep_symlink / "preview.webp"
|
||||||
|
preview_file.write_bytes(b"test")
|
||||||
|
|
||||||
|
cfg = config_module.Config()
|
||||||
|
|
||||||
|
# First preview access triggers symlink discovery at third level
|
||||||
|
is_allowed = cfg.is_preview_path_allowed(str(preview_file))
|
||||||
|
|
||||||
|
assert is_allowed
|
||||||
|
normalized_external = _normalize(str(external_dir))
|
||||||
|
normalized_deep_link = _normalize(str(deep_symlink))
|
||||||
|
assert normalized_external in cfg._path_mappings
|
||||||
|
assert cfg._path_mappings[normalized_external] == normalized_deep_link
|
||||||
|
|
||||||
|
|
||||||
|
def test_deep_symlink_points_outside_roots(monkeypatch: pytest.MonkeyPatch, tmp_path):
|
||||||
|
"""Deep symlinks can point to locations outside configured roots."""
|
||||||
|
loras_dir, settings_dir = _setup_paths(monkeypatch, tmp_path)
|
||||||
|
|
||||||
|
# Create nested structure with deep symlink pointing outside roots
|
||||||
|
subdir = loras_dir / "shared"
|
||||||
|
subdir.mkdir()
|
||||||
|
outside_root = tmp_path / "storage"
|
||||||
|
outside_root.mkdir()
|
||||||
|
deep_symlink = subdir / "models"
|
||||||
|
deep_symlink.symlink_to(outside_root, target_is_directory=True)
|
||||||
|
|
||||||
|
# Create preview file under deep symlink (outside original roots)
|
||||||
|
preview_file = deep_symlink / "external.png"
|
||||||
|
preview_file.write_bytes(b"external")
|
||||||
|
|
||||||
|
cfg = config_module.Config()
|
||||||
|
|
||||||
|
# Preview access triggers symlink discovery
|
||||||
|
is_allowed = cfg.is_preview_path_allowed(str(preview_file))
|
||||||
|
|
||||||
|
# After discovery, preview should be allowed even though target is outside roots
|
||||||
|
assert is_allowed
|
||||||
|
normalized_outside = _normalize(str(outside_root))
|
||||||
|
assert normalized_outside in cfg._path_mappings
|
||||||
|
|
||||||
|
|
||||||
|
def test_normal_path_unaffected_by_discovery(monkeypatch: pytest.MonkeyPatch, tmp_path):
|
||||||
|
"""Normal paths (no symlinks) are not affected by symlink discovery logic."""
|
||||||
|
loras_dir, settings_dir = _setup_paths(monkeypatch, tmp_path)
|
||||||
|
|
||||||
|
# Create normal file structure (no symlinks)
|
||||||
|
preview_file = loras_dir / "normal.preview.jpeg"
|
||||||
|
preview_file.write_bytes(b"normal")
|
||||||
|
|
||||||
|
cfg = config_module.Config()
|
||||||
|
|
||||||
|
# Normal paths work without any discovery
|
||||||
|
assert cfg.is_preview_path_allowed(str(preview_file))
|
||||||
|
assert len(cfg._path_mappings) == 0
|
||||||
|
|
||||||
|
|
||||||
|
def test_first_level_symlink_still_works(monkeypatch: pytest.MonkeyPatch, tmp_path):
|
||||||
|
"""First-level symlinks continue to work as before."""
|
||||||
|
loras_dir, settings_dir = _setup_paths(monkeypatch, tmp_path)
|
||||||
|
|
||||||
|
# Create first-level symlink
|
||||||
|
external_dir = tmp_path / "first_level_external"
|
||||||
|
external_dir.mkdir()
|
||||||
|
first_symlink = loras_dir / "first_level"
|
||||||
|
first_symlink.symlink_to(external_dir, target_is_directory=True)
|
||||||
|
|
||||||
|
# Create preview file under first-level symlink
|
||||||
|
preview_file = first_symlink / "model.png"
|
||||||
|
preview_file.write_bytes(b"first_level")
|
||||||
|
|
||||||
|
cfg = config_module.Config()
|
||||||
|
|
||||||
|
# First-level symlinks are scanned during initialization
|
||||||
|
normalized_external = _normalize(str(external_dir))
|
||||||
|
assert normalized_external in cfg._path_mappings
|
||||||
|
assert cfg.is_preview_path_allowed(str(preview_file))
|
||||||
|
|
||||||
# Retarget the symlink
|
|
||||||
deep_symlink.unlink()
|
|
||||||
deep_symlink.symlink_to(target_v2, target_is_directory=True)
|
|
||||||
|
|
||||||
# Second config should detect changed target and rescan
|
|
||||||
second_cfg = config_module.Config()
|
|
||||||
assert _normalize(str(target_v2)) in second_cfg._path_mappings
|
|
||||||
assert _normalize(str(target_v1)) not in second_cfg._path_mappings
|
|
||||||
def test_legacy_symlink_cache_automatic_cleanup(monkeypatch: pytest.MonkeyPatch, tmp_path):
|
def test_legacy_symlink_cache_automatic_cleanup(monkeypatch: pytest.MonkeyPatch, tmp_path):
|
||||||
"""Test that legacy symlink cache is automatically cleaned up after migration."""
|
"""Test that legacy symlink cache is automatically cleaned up after migration."""
|
||||||
settings_dir = tmp_path / "settings"
|
settings_dir = tmp_path / "settings"
|
||||||
|
|||||||
290
tests/frontend/managers/FilterManager.tagLogic.test.js
Normal file
290
tests/frontend/managers/FilterManager.tagLogic.test.js
Normal file
@@ -0,0 +1,290 @@
|
|||||||
|
import { describe, it, expect, beforeEach, vi } from 'vitest';
|
||||||
|
|
||||||
|
// Mock dependencies
|
||||||
|
vi.mock('../../../static/js/state/index.js', () => ({
|
||||||
|
getCurrentPageState: vi.fn(() => ({
|
||||||
|
filters: {},
|
||||||
|
})),
|
||||||
|
state: {
|
||||||
|
currentPageType: 'loras',
|
||||||
|
loadingManager: {
|
||||||
|
showSimpleLoading: vi.fn(),
|
||||||
|
hide: vi.fn(),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock('../../../static/js/utils/uiHelpers.js', () => ({
|
||||||
|
showToast: vi.fn(),
|
||||||
|
updatePanelPositions: vi.fn(),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock('../../../static/js/api/modelApiFactory.js', () => ({
|
||||||
|
getModelApiClient: vi.fn(() => ({
|
||||||
|
loadMoreWithVirtualScroll: vi.fn().mockResolvedValue(),
|
||||||
|
})),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock('../../../static/js/utils/storageHelpers.js', () => ({
|
||||||
|
getStorageItem: vi.fn(),
|
||||||
|
setStorageItem: vi.fn(),
|
||||||
|
removeStorageItem: vi.fn(),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock('../../../static/js/utils/i18nHelpers.js', () => ({
|
||||||
|
translate: vi.fn((key, _params, fallback) => fallback || key),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock('../../../static/js/managers/FilterPresetManager.js', () => ({
|
||||||
|
FilterPresetManager: vi.fn().mockImplementation(() => ({
|
||||||
|
renderPresets: vi.fn(),
|
||||||
|
saveActivePreset: vi.fn(),
|
||||||
|
restoreActivePreset: vi.fn(),
|
||||||
|
updateAddButtonState: vi.fn(),
|
||||||
|
hasEmptyWildcardResult: vi.fn(() => false),
|
||||||
|
})),
|
||||||
|
EMPTY_WILDCARD_MARKER: '__EMPTY_WILDCARD_RESULT__',
|
||||||
|
}));
|
||||||
|
|
||||||
|
import { FilterManager } from '../../../static/js/managers/FilterManager.js';
|
||||||
|
import { getStorageItem, setStorageItem } from '../../../static/js/utils/storageHelpers.js';
|
||||||
|
|
||||||
|
describe('FilterManager - Tag Logic', () => {
|
||||||
|
let manager;
|
||||||
|
let mockFilterPanel;
|
||||||
|
let mockTagLogicToggle;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.clearAllMocks();
|
||||||
|
|
||||||
|
// Setup DOM mocks
|
||||||
|
mockFilterPanel = document.createElement('div');
|
||||||
|
mockFilterPanel.id = 'filterPanel';
|
||||||
|
mockFilterPanel.classList.add('hidden');
|
||||||
|
|
||||||
|
mockTagLogicToggle = document.createElement('div');
|
||||||
|
mockTagLogicToggle.id = 'tagLogicToggle';
|
||||||
|
|
||||||
|
// Create tag logic options
|
||||||
|
const anyOption = document.createElement('button');
|
||||||
|
anyOption.className = 'tag-logic-option';
|
||||||
|
anyOption.dataset.value = 'any';
|
||||||
|
mockTagLogicToggle.appendChild(anyOption);
|
||||||
|
|
||||||
|
const allOption = document.createElement('button');
|
||||||
|
allOption.className = 'tag-logic-option';
|
||||||
|
allOption.dataset.value = 'all';
|
||||||
|
mockTagLogicToggle.appendChild(allOption);
|
||||||
|
|
||||||
|
document.body.appendChild(mockFilterPanel);
|
||||||
|
document.body.appendChild(mockTagLogicToggle);
|
||||||
|
|
||||||
|
// Mock getElementById
|
||||||
|
const originalGetElementById = document.getElementById;
|
||||||
|
document.getElementById = vi.fn((id) => {
|
||||||
|
if (id === 'filterPanel') return mockFilterPanel;
|
||||||
|
if (id === 'tagLogicToggle') return mockTagLogicToggle;
|
||||||
|
if (id === 'filterButton') return document.createElement('button');
|
||||||
|
if (id === 'activeFiltersCount') return document.createElement('span');
|
||||||
|
if (id === 'baseModelTags') return document.createElement('div');
|
||||||
|
if (id === 'modelTypeTags') return document.createElement('div');
|
||||||
|
return originalGetElementById.call(document, id);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('initializeFilters', () => {
|
||||||
|
it('should default tagLogic to "any" when not provided', () => {
|
||||||
|
manager = new FilterManager({ page: 'loras' });
|
||||||
|
|
||||||
|
expect(manager.filters.tagLogic).toBe('any');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should use provided tagLogic value', () => {
|
||||||
|
getStorageItem.mockReturnValue({
|
||||||
|
tagLogic: 'all',
|
||||||
|
tags: {},
|
||||||
|
baseModel: [],
|
||||||
|
});
|
||||||
|
|
||||||
|
manager = new FilterManager({ page: 'loras' });
|
||||||
|
|
||||||
|
expect(manager.filters.tagLogic).toBe('all');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('initializeTagLogicToggle', () => {
|
||||||
|
it('should set "any" option as active by default', () => {
|
||||||
|
manager = new FilterManager({ page: 'loras' });
|
||||||
|
|
||||||
|
// Ensure filters.tagLogic is set to default
|
||||||
|
manager.filters.tagLogic = 'any';
|
||||||
|
|
||||||
|
const anyOption = mockTagLogicToggle.querySelector('[data-value="any"]');
|
||||||
|
const allOption = mockTagLogicToggle.querySelector('[data-value="all"]');
|
||||||
|
|
||||||
|
// Manually update UI to ensure correct state
|
||||||
|
manager.updateTagLogicToggleUI();
|
||||||
|
|
||||||
|
expect(manager.filters.tagLogic).toBe('any');
|
||||||
|
expect(anyOption.classList.contains('active')).toBe(true);
|
||||||
|
expect(allOption.classList.contains('active')).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set "all" option as active when tagLogic is "all"', () => {
|
||||||
|
getStorageItem.mockReturnValue({
|
||||||
|
tagLogic: 'all',
|
||||||
|
tags: {},
|
||||||
|
baseModel: [],
|
||||||
|
});
|
||||||
|
|
||||||
|
manager = new FilterManager({ page: 'loras' });
|
||||||
|
|
||||||
|
// Ensure filters.tagLogic is set correctly
|
||||||
|
manager.filters.tagLogic = 'all';
|
||||||
|
|
||||||
|
const anyOption = mockTagLogicToggle.querySelector('[data-value="any"]');
|
||||||
|
const allOption = mockTagLogicToggle.querySelector('[data-value="all"]');
|
||||||
|
|
||||||
|
// Manually update UI to ensure correct state
|
||||||
|
manager.updateTagLogicToggleUI();
|
||||||
|
|
||||||
|
expect(manager.filters.tagLogic).toBe('all');
|
||||||
|
expect(anyOption.classList.contains('active')).toBe(false);
|
||||||
|
expect(allOption.classList.contains('active')).toBe(true);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('updateTagLogicToggleUI', () => {
|
||||||
|
it('should update UI when tagLogic changes', () => {
|
||||||
|
// Clear any existing active classes first
|
||||||
|
mockTagLogicToggle.querySelectorAll('.tag-logic-option').forEach(el => {
|
||||||
|
el.classList.remove('active');
|
||||||
|
});
|
||||||
|
|
||||||
|
manager = new FilterManager({ page: 'loras' });
|
||||||
|
|
||||||
|
let anyOption = mockTagLogicToggle.querySelector('[data-value="any"]');
|
||||||
|
let allOption = mockTagLogicToggle.querySelector('[data-value="all"]');
|
||||||
|
|
||||||
|
// Ensure initial state
|
||||||
|
manager.filters.tagLogic = 'any';
|
||||||
|
manager.updateTagLogicToggleUI();
|
||||||
|
expect(anyOption.classList.contains('active')).toBe(true);
|
||||||
|
expect(allOption.classList.contains('active')).toBe(false);
|
||||||
|
|
||||||
|
// Change to "all"
|
||||||
|
manager.filters.tagLogic = 'all';
|
||||||
|
manager.updateTagLogicToggleUI();
|
||||||
|
|
||||||
|
expect(anyOption.classList.contains('active')).toBe(false);
|
||||||
|
expect(allOption.classList.contains('active')).toBe(true);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('cloneFilters', () => {
|
||||||
|
it('should include tagLogic in cloned filters', () => {
|
||||||
|
manager = new FilterManager({ page: 'loras' });
|
||||||
|
manager.filters.tagLogic = 'all';
|
||||||
|
|
||||||
|
const cloned = manager.cloneFilters();
|
||||||
|
|
||||||
|
expect(cloned.tagLogic).toBe('all');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('clearFilters', () => {
|
||||||
|
it('should reset tagLogic to "any"', () => {
|
||||||
|
getStorageItem.mockReturnValue({
|
||||||
|
tagLogic: 'all',
|
||||||
|
tags: { anime: 'include' },
|
||||||
|
baseModel: ['SDXL'],
|
||||||
|
});
|
||||||
|
|
||||||
|
manager = new FilterManager({ page: 'loras' });
|
||||||
|
expect(manager.filters.tagLogic).toBe('all');
|
||||||
|
|
||||||
|
manager.clearFilters();
|
||||||
|
|
||||||
|
expect(manager.filters.tagLogic).toBe('any');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should update UI after clearing', () => {
|
||||||
|
getStorageItem.mockReturnValue({
|
||||||
|
tagLogic: 'all',
|
||||||
|
tags: {},
|
||||||
|
baseModel: [],
|
||||||
|
});
|
||||||
|
|
||||||
|
manager = new FilterManager({ page: 'loras' });
|
||||||
|
|
||||||
|
const anyOption = mockTagLogicToggle.querySelector('[data-value="any"]');
|
||||||
|
const allOption = mockTagLogicToggle.querySelector('[data-value="all"]');
|
||||||
|
|
||||||
|
// Initially "all" is active
|
||||||
|
expect(allOption.classList.contains('active')).toBe(true);
|
||||||
|
|
||||||
|
manager.clearFilters();
|
||||||
|
|
||||||
|
// After clear, "any" should be active
|
||||||
|
expect(anyOption.classList.contains('active')).toBe(true);
|
||||||
|
expect(allOption.classList.contains('active')).toBe(false);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('loadFiltersFromStorage', () => {
|
||||||
|
it('should restore tagLogic from storage', () => {
|
||||||
|
getStorageItem.mockReturnValue({
|
||||||
|
tagLogic: 'all',
|
||||||
|
tags: { anime: 'include' },
|
||||||
|
baseModel: [],
|
||||||
|
});
|
||||||
|
|
||||||
|
manager = new FilterManager({ page: 'loras' });
|
||||||
|
|
||||||
|
expect(manager.filters.tagLogic).toBe('all');
|
||||||
|
expect(manager.filters.tags).toEqual({ anime: 'include' });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should default to "any" when no tagLogic in storage', () => {
|
||||||
|
getStorageItem.mockReturnValue({
|
||||||
|
tags: {},
|
||||||
|
baseModel: [],
|
||||||
|
});
|
||||||
|
|
||||||
|
manager = new FilterManager({ page: 'loras' });
|
||||||
|
|
||||||
|
expect(manager.filters.tagLogic).toBe('any');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('tag logic toggle interaction', () => {
|
||||||
|
it('should update tagLogic when clicking "all" option', async () => {
|
||||||
|
manager = new FilterManager({ page: 'loras' });
|
||||||
|
|
||||||
|
const allOption = mockTagLogicToggle.querySelector('[data-value="all"]');
|
||||||
|
|
||||||
|
// Simulate click
|
||||||
|
allOption.click();
|
||||||
|
|
||||||
|
// Wait for async operation
|
||||||
|
await new Promise(resolve => setTimeout(resolve, 0));
|
||||||
|
|
||||||
|
expect(manager.filters.tagLogic).toBe('all');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should not change tagLogic when clicking already active option', async () => {
|
||||||
|
manager = new FilterManager({ page: 'loras' });
|
||||||
|
|
||||||
|
const anyOption = mockTagLogicToggle.querySelector('[data-value="any"]');
|
||||||
|
const applyFiltersSpy = vi.spyOn(manager, 'applyFilters');
|
||||||
|
|
||||||
|
// Click already active option
|
||||||
|
anyOption.click();
|
||||||
|
|
||||||
|
await new Promise(resolve => setTimeout(resolve, 0));
|
||||||
|
|
||||||
|
// applyFilters should not be called since value didn't change
|
||||||
|
expect(applyFiltersSpy).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -47,6 +47,8 @@ class StubDownloadManager:
|
|||||||
self.resume_error: Exception | None = None
|
self.resume_error: Exception | None = None
|
||||||
self.stop_error: Exception | None = None
|
self.stop_error: Exception | None = None
|
||||||
self.force_error: Exception | None = None
|
self.force_error: Exception | None = None
|
||||||
|
self.check_pending_result: dict[str, Any] | None = None
|
||||||
|
self.check_pending_calls: list[list[str]] = []
|
||||||
|
|
||||||
async def get_status(self, request: web.Request) -> dict[str, Any]:
|
async def get_status(self, request: web.Request) -> dict[str, Any]:
|
||||||
return {"success": True, "status": "idle"}
|
return {"success": True, "status": "idle"}
|
||||||
@@ -75,6 +77,20 @@ class StubDownloadManager:
|
|||||||
raise self.force_error
|
raise self.force_error
|
||||||
return {"success": True, "payload": payload}
|
return {"success": True, "payload": payload}
|
||||||
|
|
||||||
|
async def check_pending_models(self, model_types: list[str]) -> dict[str, Any]:
|
||||||
|
self.check_pending_calls.append(model_types)
|
||||||
|
if self.check_pending_result is not None:
|
||||||
|
return self.check_pending_result
|
||||||
|
return {
|
||||||
|
"success": True,
|
||||||
|
"is_downloading": False,
|
||||||
|
"total_models": 100,
|
||||||
|
"pending_count": 10,
|
||||||
|
"processed_count": 90,
|
||||||
|
"failed_count": 0,
|
||||||
|
"needs_download": True,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
class StubImportUseCase:
|
class StubImportUseCase:
|
||||||
def __init__(self) -> None:
|
def __init__(self) -> None:
|
||||||
@@ -236,3 +252,123 @@ async def test_import_route_returns_validation_errors():
|
|||||||
assert response.status == 400
|
assert response.status == 400
|
||||||
body = await _json(response)
|
body = await _json(response)
|
||||||
assert body == {"success": False, "error": "bad payload"}
|
assert body == {"success": False, "error": "bad payload"}
|
||||||
|
|
||||||
|
|
||||||
|
async def test_check_example_images_needed_returns_pending_counts():
|
||||||
|
"""Test that check_example_images_needed endpoint returns pending model counts."""
|
||||||
|
async with registrar_app() as harness:
|
||||||
|
harness.download_manager.check_pending_result = {
|
||||||
|
"success": True,
|
||||||
|
"is_downloading": False,
|
||||||
|
"total_models": 5500,
|
||||||
|
"pending_count": 12,
|
||||||
|
"processed_count": 5488,
|
||||||
|
"failed_count": 45,
|
||||||
|
"needs_download": True,
|
||||||
|
}
|
||||||
|
|
||||||
|
response = await harness.client.post(
|
||||||
|
"/api/lm/check-example-images-needed",
|
||||||
|
json={"model_types": ["lora", "checkpoint"]},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status == 200
|
||||||
|
body = await _json(response)
|
||||||
|
assert body["success"] is True
|
||||||
|
assert body["total_models"] == 5500
|
||||||
|
assert body["pending_count"] == 12
|
||||||
|
assert body["processed_count"] == 5488
|
||||||
|
assert body["failed_count"] == 45
|
||||||
|
assert body["needs_download"] is True
|
||||||
|
assert body["is_downloading"] is False
|
||||||
|
|
||||||
|
# Verify the manager was called with correct model types
|
||||||
|
assert harness.download_manager.check_pending_calls == [["lora", "checkpoint"]]
|
||||||
|
|
||||||
|
|
||||||
|
async def test_check_example_images_needed_handles_download_in_progress():
|
||||||
|
"""Test that check_example_images_needed returns correct status when download is running."""
|
||||||
|
async with registrar_app() as harness:
|
||||||
|
harness.download_manager.check_pending_result = {
|
||||||
|
"success": True,
|
||||||
|
"is_downloading": True,
|
||||||
|
"total_models": 0,
|
||||||
|
"pending_count": 0,
|
||||||
|
"processed_count": 0,
|
||||||
|
"failed_count": 0,
|
||||||
|
"needs_download": False,
|
||||||
|
"message": "Download already in progress",
|
||||||
|
}
|
||||||
|
|
||||||
|
response = await harness.client.post(
|
||||||
|
"/api/lm/check-example-images-needed",
|
||||||
|
json={"model_types": ["lora"]},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status == 200
|
||||||
|
body = await _json(response)
|
||||||
|
assert body["success"] is True
|
||||||
|
assert body["is_downloading"] is True
|
||||||
|
assert body["needs_download"] is False
|
||||||
|
|
||||||
|
|
||||||
|
async def test_check_example_images_needed_handles_no_pending_models():
|
||||||
|
"""Test that check_example_images_needed returns correct status when no work is needed."""
|
||||||
|
async with registrar_app() as harness:
|
||||||
|
harness.download_manager.check_pending_result = {
|
||||||
|
"success": True,
|
||||||
|
"is_downloading": False,
|
||||||
|
"total_models": 5500,
|
||||||
|
"pending_count": 0,
|
||||||
|
"processed_count": 5500,
|
||||||
|
"failed_count": 0,
|
||||||
|
"needs_download": False,
|
||||||
|
}
|
||||||
|
|
||||||
|
response = await harness.client.post(
|
||||||
|
"/api/lm/check-example-images-needed",
|
||||||
|
json={"model_types": ["lora", "checkpoint", "embedding"]},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status == 200
|
||||||
|
body = await _json(response)
|
||||||
|
assert body["success"] is True
|
||||||
|
assert body["pending_count"] == 0
|
||||||
|
assert body["needs_download"] is False
|
||||||
|
assert body["processed_count"] == 5500
|
||||||
|
|
||||||
|
|
||||||
|
async def test_check_example_images_needed_uses_default_model_types():
|
||||||
|
"""Test that check_example_images_needed uses default model types when not specified."""
|
||||||
|
async with registrar_app() as harness:
|
||||||
|
response = await harness.client.post(
|
||||||
|
"/api/lm/check-example-images-needed",
|
||||||
|
json={}, # No model_types specified
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status == 200
|
||||||
|
# Should use default model types
|
||||||
|
assert harness.download_manager.check_pending_calls == [["lora", "checkpoint", "embedding"]]
|
||||||
|
|
||||||
|
|
||||||
|
async def test_check_example_images_needed_returns_error_on_exception():
|
||||||
|
"""Test that check_example_images_needed returns 500 on internal error."""
|
||||||
|
async with registrar_app() as harness:
|
||||||
|
# Simulate an error by setting result to an error state
|
||||||
|
# Actually, we need to make the method raise an exception
|
||||||
|
original_method = harness.download_manager.check_pending_models
|
||||||
|
|
||||||
|
async def failing_check(_model_types):
|
||||||
|
raise RuntimeError("Database connection failed")
|
||||||
|
|
||||||
|
harness.download_manager.check_pending_models = failing_check
|
||||||
|
|
||||||
|
response = await harness.client.post(
|
||||||
|
"/api/lm/check-example-images-needed",
|
||||||
|
json={"model_types": ["lora"]},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status == 500
|
||||||
|
body = await _json(response)
|
||||||
|
assert body["success"] is False
|
||||||
|
assert "Database connection failed" in body["error"]
|
||||||
|
|||||||
@@ -502,6 +502,7 @@ def test_handler_set_route_mapping_includes_all_handlers() -> None:
|
|||||||
"resume_example_images",
|
"resume_example_images",
|
||||||
"stop_example_images",
|
"stop_example_images",
|
||||||
"force_download_example_images",
|
"force_download_example_images",
|
||||||
|
"check_example_images_needed",
|
||||||
"import_example_images",
|
"import_example_images",
|
||||||
"delete_example_image",
|
"delete_example_image",
|
||||||
"set_example_image_nsfw_level",
|
"set_example_image_nsfw_level",
|
||||||
|
|||||||
@@ -188,3 +188,91 @@ def test_is_preview_path_allowed_rejects_prefix_without_separator(tmp_path):
|
|||||||
# The sibling path should NOT be allowed even though it shares a prefix
|
# The sibling path should NOT be allowed even though it shares a prefix
|
||||||
assert not config.is_preview_path_allowed(str(sibling_file)), \
|
assert not config.is_preview_path_allowed(str(sibling_file)), \
|
||||||
f"Path in '{sibling_root}' should NOT be allowed when root is '{library_root}'"
|
f"Path in '{sibling_root}' should NOT be allowed when root is '{library_root}'"
|
||||||
|
|
||||||
|
|
||||||
|
async def test_preview_handler_serves_from_deep_symlink(tmp_path):
|
||||||
|
"""Test that previews under deep symlinks are served correctly."""
|
||||||
|
library_root = tmp_path / "library"
|
||||||
|
library_root.mkdir()
|
||||||
|
|
||||||
|
# Create nested structure with deep symlink at second level
|
||||||
|
subdir = library_root / "anime"
|
||||||
|
subdir.mkdir()
|
||||||
|
external_dir = tmp_path / "external"
|
||||||
|
external_dir.mkdir()
|
||||||
|
deep_symlink = subdir / "styles"
|
||||||
|
deep_symlink.symlink_to(external_dir, target_is_directory=True)
|
||||||
|
|
||||||
|
# Create preview file under deep symlink
|
||||||
|
preview_file = deep_symlink / "model.preview.webp"
|
||||||
|
preview_file.write_bytes(b"preview_content")
|
||||||
|
|
||||||
|
config = Config()
|
||||||
|
config.apply_library_settings(
|
||||||
|
{
|
||||||
|
"folder_paths": {
|
||||||
|
"loras": [str(library_root)],
|
||||||
|
"checkpoints": [],
|
||||||
|
"unet": [],
|
||||||
|
"embeddings": [],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
handler = PreviewHandler(config=config)
|
||||||
|
encoded_path = urllib.parse.quote(str(preview_file), safe="")
|
||||||
|
request = make_mocked_request("GET", f"/api/lm/previews?path={encoded_path}")
|
||||||
|
|
||||||
|
response = await handler.serve_preview(request)
|
||||||
|
|
||||||
|
assert isinstance(response, web.FileResponse)
|
||||||
|
assert response.status == 200
|
||||||
|
assert Path(response._path) == preview_file.resolve()
|
||||||
|
|
||||||
|
|
||||||
|
async def test_deep_symlink_discovered_on_first_access(tmp_path):
|
||||||
|
"""Test that deep symlinks are discovered on first preview access."""
|
||||||
|
library_root = tmp_path / "library"
|
||||||
|
library_root.mkdir()
|
||||||
|
|
||||||
|
# Create nested structure with deep symlink at second level
|
||||||
|
subdir = library_root / "category"
|
||||||
|
subdir.mkdir()
|
||||||
|
external_dir = tmp_path / "storage"
|
||||||
|
external_dir.mkdir()
|
||||||
|
deep_symlink = subdir / "models"
|
||||||
|
deep_symlink.symlink_to(external_dir, target_is_directory=True)
|
||||||
|
|
||||||
|
# Create preview file under deep symlink
|
||||||
|
preview_file = deep_symlink / "test.png"
|
||||||
|
preview_file.write_bytes(b"test_image")
|
||||||
|
|
||||||
|
config = Config()
|
||||||
|
config.apply_library_settings(
|
||||||
|
{
|
||||||
|
"folder_paths": {
|
||||||
|
"loras": [str(library_root)],
|
||||||
|
"checkpoints": [],
|
||||||
|
"unet": [],
|
||||||
|
"embeddings": [],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
# Deep symlink should not be in mappings initially
|
||||||
|
normalized_external = os.path.normpath(str(external_dir)).replace(os.sep, '/')
|
||||||
|
assert normalized_external not in config._path_mappings
|
||||||
|
|
||||||
|
handler = PreviewHandler(config=config)
|
||||||
|
encoded_path = urllib.parse.quote(str(preview_file), safe="")
|
||||||
|
request = make_mocked_request("GET", f"/api/lm/previews?path={encoded_path}")
|
||||||
|
|
||||||
|
# First access should trigger symlink discovery and serve the preview
|
||||||
|
response = await handler.serve_preview(request)
|
||||||
|
|
||||||
|
assert isinstance(response, web.FileResponse)
|
||||||
|
assert response.status == 200
|
||||||
|
assert Path(response._path) == preview_file.resolve()
|
||||||
|
|
||||||
|
# Deep symlink should now be in mappings
|
||||||
|
assert normalized_external in config._path_mappings
|
||||||
|
|||||||
166
tests/routes/test_tag_logic_param_parsing.py
Normal file
166
tests/routes/test_tag_logic_param_parsing.py
Normal file
@@ -0,0 +1,166 @@
|
|||||||
|
"""Tests for tag_logic parameter parsing in model handlers."""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from unittest.mock import Mock
|
||||||
|
from aiohttp import web
|
||||||
|
from aiohttp.test_utils import TestClient, TestServer
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import types
|
||||||
|
|
||||||
|
folder_paths_stub = types.SimpleNamespace(get_folder_paths=lambda *_: [])
|
||||||
|
sys.modules.setdefault("folder_paths", folder_paths_stub)
|
||||||
|
|
||||||
|
from py.routes.handlers.model_handlers import ModelListingHandler
|
||||||
|
|
||||||
|
|
||||||
|
class MockService:
|
||||||
|
"""Mock service for testing."""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.model_type = "test-model"
|
||||||
|
|
||||||
|
async def get_paginated_data(self, **kwargs):
|
||||||
|
# Store the kwargs for verification
|
||||||
|
self.last_call_kwargs = kwargs
|
||||||
|
return {
|
||||||
|
"items": [],
|
||||||
|
"total": 0,
|
||||||
|
"page": 1,
|
||||||
|
"page_size": 20,
|
||||||
|
"total_pages": 0,
|
||||||
|
}
|
||||||
|
|
||||||
|
async def format_response(self, item):
|
||||||
|
return item
|
||||||
|
|
||||||
|
|
||||||
|
def parse_specific_params(request):
|
||||||
|
"""No specific params for testing."""
|
||||||
|
return {}
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def handler():
|
||||||
|
service = MockService()
|
||||||
|
logger = Mock()
|
||||||
|
return ModelListingHandler(
|
||||||
|
service=service,
|
||||||
|
parse_specific_params=parse_specific_params,
|
||||||
|
logger=logger,
|
||||||
|
), service
|
||||||
|
|
||||||
|
|
||||||
|
async def make_request(handler, query_string=""):
|
||||||
|
"""Helper to create a request and call get_models."""
|
||||||
|
app = web.Application()
|
||||||
|
|
||||||
|
async def test_handler(request):
|
||||||
|
return await handler.get_models(request)
|
||||||
|
|
||||||
|
app.router.add_get("/test", test_handler)
|
||||||
|
server = TestServer(app)
|
||||||
|
client = TestClient(server)
|
||||||
|
await client.start_server()
|
||||||
|
|
||||||
|
try:
|
||||||
|
response = await client.get(f"/test?{query_string}")
|
||||||
|
return response
|
||||||
|
finally:
|
||||||
|
await client.close()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_tag_logic_param_default_is_any(handler):
|
||||||
|
"""Test that tag_logic defaults to 'any' when not provided."""
|
||||||
|
h, service = handler
|
||||||
|
|
||||||
|
response = await make_request(h, "tag_include=anime&tag_include=realistic")
|
||||||
|
assert response.status == 200
|
||||||
|
|
||||||
|
# Verify tag_logic was set to 'any' by default
|
||||||
|
assert service.last_call_kwargs["tag_logic"] == "any"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_tag_logic_param_explicit_any(handler):
|
||||||
|
"""Test that tag_logic='any' is correctly parsed."""
|
||||||
|
h, service = handler
|
||||||
|
|
||||||
|
response = await make_request(h, "tag_include=anime&tag_logic=any")
|
||||||
|
assert response.status == 200
|
||||||
|
|
||||||
|
assert service.last_call_kwargs["tag_logic"] == "any"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_tag_logic_param_explicit_all(handler):
|
||||||
|
"""Test that tag_logic='all' is correctly parsed."""
|
||||||
|
h, service = handler
|
||||||
|
|
||||||
|
response = await make_request(h, "tag_include=anime&tag_include=realistic&tag_logic=all")
|
||||||
|
assert response.status == 200
|
||||||
|
|
||||||
|
assert service.last_call_kwargs["tag_logic"] == "all"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_tag_logic_param_case_insensitive(handler):
|
||||||
|
"""Test that tag_logic values are case insensitive."""
|
||||||
|
h, service = handler
|
||||||
|
|
||||||
|
# Test uppercase
|
||||||
|
response = await make_request(h, "tag_logic=ALL")
|
||||||
|
assert response.status == 200
|
||||||
|
assert service.last_call_kwargs["tag_logic"] == "all"
|
||||||
|
|
||||||
|
# Test mixed case
|
||||||
|
response = await make_request(h, "tag_logic=Any")
|
||||||
|
assert response.status == 200
|
||||||
|
assert service.last_call_kwargs["tag_logic"] == "any"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_tag_logic_param_invalid_value_defaults_to_any(handler):
|
||||||
|
"""Test that invalid tag_logic values default to 'any'."""
|
||||||
|
h, service = handler
|
||||||
|
|
||||||
|
response = await make_request(h, "tag_logic=invalid")
|
||||||
|
assert response.status == 200
|
||||||
|
|
||||||
|
# Should default to 'any' for invalid values
|
||||||
|
assert service.last_call_kwargs["tag_logic"] == "any"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_tag_logic_param_with_other_filters(handler):
|
||||||
|
"""Test that tag_logic works correctly with other filter parameters."""
|
||||||
|
h, service = handler
|
||||||
|
|
||||||
|
query = (
|
||||||
|
"tag_include=anime&"
|
||||||
|
"tag_include=character&"
|
||||||
|
"tag_exclude=nsfw&"
|
||||||
|
"base_model=SDXL&"
|
||||||
|
"tag_logic=all"
|
||||||
|
)
|
||||||
|
response = await make_request(h, query)
|
||||||
|
assert response.status == 200
|
||||||
|
|
||||||
|
assert service.last_call_kwargs["tag_logic"] == "all"
|
||||||
|
assert service.last_call_kwargs["base_models"] == ["SDXL"]
|
||||||
|
assert "anime" in service.last_call_kwargs["tags"]
|
||||||
|
assert "character" in service.last_call_kwargs["tags"]
|
||||||
|
assert "nsfw" in service.last_call_kwargs["tags"]
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_tag_logic_without_include_tags(handler):
|
||||||
|
"""Test that tag_logic is still passed even without include tags."""
|
||||||
|
h, service = handler
|
||||||
|
|
||||||
|
response = await make_request(h, "tag_logic=all&base_model=SDXL")
|
||||||
|
assert response.status == 200
|
||||||
|
|
||||||
|
# tag_logic should still be set even without tag filters
|
||||||
|
assert service.last_call_kwargs["tag_logic"] == "all"
|
||||||
283
tests/services/test_cache_entry_validator.py
Normal file
283
tests/services/test_cache_entry_validator.py
Normal file
@@ -0,0 +1,283 @@
|
|||||||
|
"""
|
||||||
|
Unit tests for CacheEntryValidator
|
||||||
|
"""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from py.services.cache_entry_validator import (
|
||||||
|
CacheEntryValidator,
|
||||||
|
ValidationResult,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class TestCacheEntryValidator:
|
||||||
|
"""Tests for CacheEntryValidator class"""
|
||||||
|
|
||||||
|
def test_validate_valid_entry(self):
|
||||||
|
"""Test validation of a valid cache entry"""
|
||||||
|
entry = {
|
||||||
|
'file_path': '/models/test.safetensors',
|
||||||
|
'sha256': 'abc123def456',
|
||||||
|
'file_name': 'test.safetensors',
|
||||||
|
'model_name': 'Test Model',
|
||||||
|
'size': 1024,
|
||||||
|
'modified': 1234567890.0,
|
||||||
|
'tags': ['tag1', 'tag2'],
|
||||||
|
}
|
||||||
|
|
||||||
|
result = CacheEntryValidator.validate(entry, auto_repair=False)
|
||||||
|
|
||||||
|
assert result.is_valid is True
|
||||||
|
assert result.repaired is False
|
||||||
|
assert len(result.errors) == 0
|
||||||
|
assert result.entry == entry
|
||||||
|
|
||||||
|
def test_validate_missing_required_field_sha256(self):
|
||||||
|
"""Test validation fails when required sha256 field is missing"""
|
||||||
|
entry = {
|
||||||
|
'file_path': '/models/test.safetensors',
|
||||||
|
# sha256 missing
|
||||||
|
'file_name': 'test.safetensors',
|
||||||
|
}
|
||||||
|
|
||||||
|
result = CacheEntryValidator.validate(entry, auto_repair=False)
|
||||||
|
|
||||||
|
assert result.is_valid is False
|
||||||
|
assert result.repaired is False
|
||||||
|
assert any('sha256' in error for error in result.errors)
|
||||||
|
|
||||||
|
def test_validate_missing_required_field_file_path(self):
|
||||||
|
"""Test validation fails when required file_path field is missing"""
|
||||||
|
entry = {
|
||||||
|
# file_path missing
|
||||||
|
'sha256': 'abc123def456',
|
||||||
|
'file_name': 'test.safetensors',
|
||||||
|
}
|
||||||
|
|
||||||
|
result = CacheEntryValidator.validate(entry, auto_repair=False)
|
||||||
|
|
||||||
|
assert result.is_valid is False
|
||||||
|
assert result.repaired is False
|
||||||
|
assert any('file_path' in error for error in result.errors)
|
||||||
|
|
||||||
|
def test_validate_empty_required_field_sha256(self):
|
||||||
|
"""Test validation fails when sha256 is empty string"""
|
||||||
|
entry = {
|
||||||
|
'file_path': '/models/test.safetensors',
|
||||||
|
'sha256': '', # Empty string
|
||||||
|
}
|
||||||
|
|
||||||
|
result = CacheEntryValidator.validate(entry, auto_repair=False)
|
||||||
|
|
||||||
|
assert result.is_valid is False
|
||||||
|
assert result.repaired is False
|
||||||
|
assert any('sha256' in error for error in result.errors)
|
||||||
|
|
||||||
|
def test_validate_empty_required_field_file_path(self):
|
||||||
|
"""Test validation fails when file_path is empty string"""
|
||||||
|
entry = {
|
||||||
|
'file_path': '', # Empty string
|
||||||
|
'sha256': 'abc123def456',
|
||||||
|
}
|
||||||
|
|
||||||
|
result = CacheEntryValidator.validate(entry, auto_repair=False)
|
||||||
|
|
||||||
|
assert result.is_valid is False
|
||||||
|
assert result.repaired is False
|
||||||
|
assert any('file_path' in error for error in result.errors)
|
||||||
|
|
||||||
|
def test_validate_none_required_field(self):
|
||||||
|
"""Test validation fails when required field is None"""
|
||||||
|
entry = {
|
||||||
|
'file_path': None,
|
||||||
|
'sha256': 'abc123def456',
|
||||||
|
}
|
||||||
|
|
||||||
|
result = CacheEntryValidator.validate(entry, auto_repair=False)
|
||||||
|
|
||||||
|
assert result.is_valid is False
|
||||||
|
assert result.repaired is False
|
||||||
|
assert any('file_path' in error for error in result.errors)
|
||||||
|
|
||||||
|
def test_validate_none_entry(self):
|
||||||
|
"""Test validation handles None entry"""
|
||||||
|
result = CacheEntryValidator.validate(None, auto_repair=False)
|
||||||
|
|
||||||
|
assert result.is_valid is False
|
||||||
|
assert result.repaired is False
|
||||||
|
assert any('None' in error for error in result.errors)
|
||||||
|
assert result.entry is None
|
||||||
|
|
||||||
|
def test_validate_non_dict_entry(self):
|
||||||
|
"""Test validation handles non-dict entry"""
|
||||||
|
result = CacheEntryValidator.validate("not a dict", auto_repair=False)
|
||||||
|
|
||||||
|
assert result.is_valid is False
|
||||||
|
assert result.repaired is False
|
||||||
|
assert any('not a dict' in error for error in result.errors)
|
||||||
|
assert result.entry is None
|
||||||
|
|
||||||
|
def test_auto_repair_missing_non_required_field(self):
|
||||||
|
"""Test auto-repair adds missing non-required fields"""
|
||||||
|
entry = {
|
||||||
|
'file_path': '/models/test.safetensors',
|
||||||
|
'sha256': 'abc123def456',
|
||||||
|
# file_name, model_name, tags missing
|
||||||
|
}
|
||||||
|
|
||||||
|
result = CacheEntryValidator.validate(entry, auto_repair=True)
|
||||||
|
|
||||||
|
assert result.is_valid is True
|
||||||
|
assert result.repaired is True
|
||||||
|
assert result.entry['file_name'] == ''
|
||||||
|
assert result.entry['model_name'] == ''
|
||||||
|
assert result.entry['tags'] == []
|
||||||
|
|
||||||
|
def test_auto_repair_wrong_type_field(self):
|
||||||
|
"""Test auto-repair fixes fields with wrong type"""
|
||||||
|
entry = {
|
||||||
|
'file_path': '/models/test.safetensors',
|
||||||
|
'sha256': 'abc123def456',
|
||||||
|
'size': 'not a number', # Should be int
|
||||||
|
'tags': 'not a list', # Should be list
|
||||||
|
}
|
||||||
|
|
||||||
|
result = CacheEntryValidator.validate(entry, auto_repair=True)
|
||||||
|
|
||||||
|
assert result.is_valid is True
|
||||||
|
assert result.repaired is True
|
||||||
|
assert result.entry['size'] == 0 # Default value
|
||||||
|
assert result.entry['tags'] == [] # Default value
|
||||||
|
|
||||||
|
def test_normalize_sha256_lowercase(self):
|
||||||
|
"""Test sha256 is normalized to lowercase"""
|
||||||
|
entry = {
|
||||||
|
'file_path': '/models/test.safetensors',
|
||||||
|
'sha256': 'ABC123DEF456', # Uppercase
|
||||||
|
}
|
||||||
|
|
||||||
|
result = CacheEntryValidator.validate(entry, auto_repair=True)
|
||||||
|
|
||||||
|
assert result.is_valid is True
|
||||||
|
assert result.entry['sha256'] == 'abc123def456'
|
||||||
|
|
||||||
|
def test_validate_batch_all_valid(self):
|
||||||
|
"""Test batch validation with all valid entries"""
|
||||||
|
entries = [
|
||||||
|
{
|
||||||
|
'file_path': '/models/test1.safetensors',
|
||||||
|
'sha256': 'abc123',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'file_path': '/models/test2.safetensors',
|
||||||
|
'sha256': 'def456',
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
valid, invalid = CacheEntryValidator.validate_batch(entries, auto_repair=False)
|
||||||
|
|
||||||
|
assert len(valid) == 2
|
||||||
|
assert len(invalid) == 0
|
||||||
|
|
||||||
|
def test_validate_batch_mixed_validity(self):
|
||||||
|
"""Test batch validation with mixed valid/invalid entries"""
|
||||||
|
entries = [
|
||||||
|
{
|
||||||
|
'file_path': '/models/test1.safetensors',
|
||||||
|
'sha256': 'abc123',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'file_path': '/models/test2.safetensors',
|
||||||
|
# sha256 missing - invalid
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'file_path': '/models/test3.safetensors',
|
||||||
|
'sha256': 'def456',
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
valid, invalid = CacheEntryValidator.validate_batch(entries, auto_repair=False)
|
||||||
|
|
||||||
|
assert len(valid) == 2
|
||||||
|
assert len(invalid) == 1
|
||||||
|
# invalid list contains the actual invalid entries (not by index)
|
||||||
|
assert invalid[0]['file_path'] == '/models/test2.safetensors'
|
||||||
|
|
||||||
|
def test_validate_batch_empty_list(self):
|
||||||
|
"""Test batch validation with empty list"""
|
||||||
|
valid, invalid = CacheEntryValidator.validate_batch([], auto_repair=False)
|
||||||
|
|
||||||
|
assert len(valid) == 0
|
||||||
|
assert len(invalid) == 0
|
||||||
|
|
||||||
|
def test_get_file_path_safe(self):
|
||||||
|
"""Test safe file_path extraction"""
|
||||||
|
entry = {'file_path': '/models/test.safetensors', 'sha256': 'abc123'}
|
||||||
|
assert CacheEntryValidator.get_file_path_safe(entry) == '/models/test.safetensors'
|
||||||
|
|
||||||
|
def test_get_file_path_safe_missing(self):
|
||||||
|
"""Test safe file_path extraction when missing"""
|
||||||
|
entry = {'sha256': 'abc123'}
|
||||||
|
assert CacheEntryValidator.get_file_path_safe(entry) == ''
|
||||||
|
|
||||||
|
def test_get_file_path_safe_not_dict(self):
|
||||||
|
"""Test safe file_path extraction from non-dict"""
|
||||||
|
assert CacheEntryValidator.get_file_path_safe(None) == ''
|
||||||
|
assert CacheEntryValidator.get_file_path_safe('string') == ''
|
||||||
|
|
||||||
|
def test_get_sha256_safe(self):
|
||||||
|
"""Test safe sha256 extraction"""
|
||||||
|
entry = {'file_path': '/models/test.safetensors', 'sha256': 'ABC123'}
|
||||||
|
assert CacheEntryValidator.get_sha256_safe(entry) == 'abc123'
|
||||||
|
|
||||||
|
def test_get_sha256_safe_missing(self):
|
||||||
|
"""Test safe sha256 extraction when missing"""
|
||||||
|
entry = {'file_path': '/models/test.safetensors'}
|
||||||
|
assert CacheEntryValidator.get_sha256_safe(entry) == ''
|
||||||
|
|
||||||
|
def test_get_sha256_safe_not_dict(self):
|
||||||
|
"""Test safe sha256 extraction from non-dict"""
|
||||||
|
assert CacheEntryValidator.get_sha256_safe(None) == ''
|
||||||
|
assert CacheEntryValidator.get_sha256_safe('string') == ''
|
||||||
|
|
||||||
|
def test_validate_with_all_optional_fields(self):
|
||||||
|
"""Test validation with all optional fields present"""
|
||||||
|
entry = {
|
||||||
|
'file_path': '/models/test.safetensors',
|
||||||
|
'sha256': 'abc123',
|
||||||
|
'file_name': 'test.safetensors',
|
||||||
|
'model_name': 'Test Model',
|
||||||
|
'folder': 'test_folder',
|
||||||
|
'size': 1024,
|
||||||
|
'modified': 1234567890.0,
|
||||||
|
'tags': ['tag1', 'tag2'],
|
||||||
|
'preview_url': 'http://example.com/preview.jpg',
|
||||||
|
'base_model': 'SD1.5',
|
||||||
|
'from_civitai': True,
|
||||||
|
'favorite': True,
|
||||||
|
'exclude': False,
|
||||||
|
'db_checked': True,
|
||||||
|
'preview_nsfw_level': 1,
|
||||||
|
'notes': 'Test notes',
|
||||||
|
'usage_tips': 'Test tips',
|
||||||
|
}
|
||||||
|
|
||||||
|
result = CacheEntryValidator.validate(entry, auto_repair=False)
|
||||||
|
|
||||||
|
assert result.is_valid is True
|
||||||
|
assert result.repaired is False
|
||||||
|
assert result.entry == entry
|
||||||
|
|
||||||
|
def test_validate_numeric_field_accepts_float_for_int(self):
|
||||||
|
"""Test that numeric fields accept float for int type"""
|
||||||
|
entry = {
|
||||||
|
'file_path': '/models/test.safetensors',
|
||||||
|
'sha256': 'abc123',
|
||||||
|
'size': 1024.5, # Float for int field
|
||||||
|
'modified': 1234567890.0,
|
||||||
|
}
|
||||||
|
|
||||||
|
result = CacheEntryValidator.validate(entry, auto_repair=False)
|
||||||
|
|
||||||
|
assert result.is_valid is True
|
||||||
|
assert result.repaired is False
|
||||||
364
tests/services/test_cache_health_monitor.py
Normal file
364
tests/services/test_cache_health_monitor.py
Normal file
@@ -0,0 +1,364 @@
|
|||||||
|
"""
|
||||||
|
Unit tests for CacheHealthMonitor
|
||||||
|
"""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from py.services.cache_health_monitor import (
|
||||||
|
CacheHealthMonitor,
|
||||||
|
CacheHealthStatus,
|
||||||
|
HealthReport,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class TestCacheHealthMonitor:
|
||||||
|
"""Tests for CacheHealthMonitor class"""
|
||||||
|
|
||||||
|
def test_check_health_all_valid_entries(self):
|
||||||
|
"""Test health check with 100% valid entries"""
|
||||||
|
monitor = CacheHealthMonitor()
|
||||||
|
|
||||||
|
entries = [
|
||||||
|
{
|
||||||
|
'file_path': f'/models/test{i}.safetensors',
|
||||||
|
'sha256': f'hash{i}',
|
||||||
|
}
|
||||||
|
for i in range(100)
|
||||||
|
]
|
||||||
|
|
||||||
|
report = monitor.check_health(entries, auto_repair=False)
|
||||||
|
|
||||||
|
assert report.status == CacheHealthStatus.HEALTHY
|
||||||
|
assert report.total_entries == 100
|
||||||
|
assert report.valid_entries == 100
|
||||||
|
assert report.invalid_entries == 0
|
||||||
|
assert report.repaired_entries == 0
|
||||||
|
assert report.corruption_rate == 0.0
|
||||||
|
assert report.message == "Cache is healthy"
|
||||||
|
|
||||||
|
def test_check_health_degraded_cache(self):
|
||||||
|
"""Test health check with 1-5% invalid entries (degraded)"""
|
||||||
|
monitor = CacheHealthMonitor()
|
||||||
|
|
||||||
|
# Create 100 entries, 2 invalid (2%)
|
||||||
|
entries = [
|
||||||
|
{
|
||||||
|
'file_path': f'/models/test{i}.safetensors',
|
||||||
|
'sha256': f'hash{i}',
|
||||||
|
}
|
||||||
|
for i in range(98)
|
||||||
|
]
|
||||||
|
# Add 2 invalid entries
|
||||||
|
entries.append({'file_path': '/models/invalid1.safetensors'}) # Missing sha256
|
||||||
|
entries.append({'file_path': '/models/invalid2.safetensors'}) # Missing sha256
|
||||||
|
|
||||||
|
report = monitor.check_health(entries, auto_repair=False)
|
||||||
|
|
||||||
|
assert report.status == CacheHealthStatus.DEGRADED
|
||||||
|
assert report.total_entries == 100
|
||||||
|
assert report.valid_entries == 98
|
||||||
|
assert report.invalid_entries == 2
|
||||||
|
assert report.corruption_rate == 0.02
|
||||||
|
# Message describes the issue without necessarily containing the word "degraded"
|
||||||
|
assert 'invalid entries' in report.message.lower()
|
||||||
|
|
||||||
|
def test_check_health_corrupted_cache(self):
|
||||||
|
"""Test health check with >5% invalid entries (corrupted)"""
|
||||||
|
monitor = CacheHealthMonitor()
|
||||||
|
|
||||||
|
# Create 100 entries, 10 invalid (10%)
|
||||||
|
entries = [
|
||||||
|
{
|
||||||
|
'file_path': f'/models/test{i}.safetensors',
|
||||||
|
'sha256': f'hash{i}',
|
||||||
|
}
|
||||||
|
for i in range(90)
|
||||||
|
]
|
||||||
|
# Add 10 invalid entries
|
||||||
|
for i in range(10):
|
||||||
|
entries.append({'file_path': f'/models/invalid{i}.safetensors'})
|
||||||
|
|
||||||
|
report = monitor.check_health(entries, auto_repair=False)
|
||||||
|
|
||||||
|
assert report.status == CacheHealthStatus.CORRUPTED
|
||||||
|
assert report.total_entries == 100
|
||||||
|
assert report.valid_entries == 90
|
||||||
|
assert report.invalid_entries == 10
|
||||||
|
assert report.corruption_rate == 0.10
|
||||||
|
assert 'corrupted' in report.message.lower()
|
||||||
|
|
||||||
|
def test_check_health_empty_cache(self):
|
||||||
|
"""Test health check with empty cache"""
|
||||||
|
monitor = CacheHealthMonitor()
|
||||||
|
|
||||||
|
report = monitor.check_health([], auto_repair=False)
|
||||||
|
|
||||||
|
assert report.status == CacheHealthStatus.HEALTHY
|
||||||
|
assert report.total_entries == 0
|
||||||
|
assert report.valid_entries == 0
|
||||||
|
assert report.invalid_entries == 0
|
||||||
|
assert report.corruption_rate == 0.0
|
||||||
|
assert report.message == "Cache is empty"
|
||||||
|
|
||||||
|
def test_check_health_single_invalid_entry(self):
|
||||||
|
"""Test health check with 1 invalid entry out of 1 (100% corruption)"""
|
||||||
|
monitor = CacheHealthMonitor()
|
||||||
|
|
||||||
|
entries = [{'file_path': '/models/invalid.safetensors'}]
|
||||||
|
|
||||||
|
report = monitor.check_health(entries, auto_repair=False)
|
||||||
|
|
||||||
|
assert report.status == CacheHealthStatus.CORRUPTED
|
||||||
|
assert report.total_entries == 1
|
||||||
|
assert report.valid_entries == 0
|
||||||
|
assert report.invalid_entries == 1
|
||||||
|
assert report.corruption_rate == 1.0
|
||||||
|
|
||||||
|
def test_check_health_boundary_degraded_threshold(self):
|
||||||
|
"""Test health check at degraded threshold (1%)"""
|
||||||
|
monitor = CacheHealthMonitor(degraded_threshold=0.01)
|
||||||
|
|
||||||
|
# 100 entries, 1 invalid (exactly 1%)
|
||||||
|
entries = [
|
||||||
|
{
|
||||||
|
'file_path': f'/models/test{i}.safetensors',
|
||||||
|
'sha256': f'hash{i}',
|
||||||
|
}
|
||||||
|
for i in range(99)
|
||||||
|
]
|
||||||
|
entries.append({'file_path': '/models/invalid.safetensors'})
|
||||||
|
|
||||||
|
report = monitor.check_health(entries, auto_repair=False)
|
||||||
|
|
||||||
|
assert report.status == CacheHealthStatus.DEGRADED
|
||||||
|
assert report.corruption_rate == 0.01
|
||||||
|
|
||||||
|
def test_check_health_boundary_corrupted_threshold(self):
|
||||||
|
"""Test health check at corrupted threshold (5%)"""
|
||||||
|
monitor = CacheHealthMonitor(corrupted_threshold=0.05)
|
||||||
|
|
||||||
|
# 100 entries, 5 invalid (exactly 5%)
|
||||||
|
entries = [
|
||||||
|
{
|
||||||
|
'file_path': f'/models/test{i}.safetensors',
|
||||||
|
'sha256': f'hash{i}',
|
||||||
|
}
|
||||||
|
for i in range(95)
|
||||||
|
]
|
||||||
|
for i in range(5):
|
||||||
|
entries.append({'file_path': f'/models/invalid{i}.safetensors'})
|
||||||
|
|
||||||
|
report = monitor.check_health(entries, auto_repair=False)
|
||||||
|
|
||||||
|
assert report.status == CacheHealthStatus.CORRUPTED
|
||||||
|
assert report.corruption_rate == 0.05
|
||||||
|
|
||||||
|
def test_check_health_below_degraded_threshold(self):
|
||||||
|
"""Test health check below degraded threshold (0%)"""
|
||||||
|
monitor = CacheHealthMonitor(degraded_threshold=0.01)
|
||||||
|
|
||||||
|
# All entries valid
|
||||||
|
entries = [
|
||||||
|
{
|
||||||
|
'file_path': f'/models/test{i}.safetensors',
|
||||||
|
'sha256': f'hash{i}',
|
||||||
|
}
|
||||||
|
for i in range(100)
|
||||||
|
]
|
||||||
|
|
||||||
|
report = monitor.check_health(entries, auto_repair=False)
|
||||||
|
|
||||||
|
assert report.status == CacheHealthStatus.HEALTHY
|
||||||
|
assert report.corruption_rate == 0.0
|
||||||
|
|
||||||
|
def test_check_health_auto_repair(self):
|
||||||
|
"""Test health check with auto_repair enabled"""
|
||||||
|
monitor = CacheHealthMonitor()
|
||||||
|
|
||||||
|
# 1 entry with all fields (won't be repaired), 1 entry with missing non-required fields (will be repaired)
|
||||||
|
complete_entry = {
|
||||||
|
'file_path': '/models/test1.safetensors',
|
||||||
|
'sha256': 'hash1',
|
||||||
|
'file_name': 'test1.safetensors',
|
||||||
|
'model_name': 'Model 1',
|
||||||
|
'folder': '',
|
||||||
|
'size': 0,
|
||||||
|
'modified': 0.0,
|
||||||
|
'tags': ['tag1'],
|
||||||
|
'preview_url': '',
|
||||||
|
'base_model': '',
|
||||||
|
'from_civitai': True,
|
||||||
|
'favorite': False,
|
||||||
|
'exclude': False,
|
||||||
|
'db_checked': False,
|
||||||
|
'preview_nsfw_level': 0,
|
||||||
|
'notes': '',
|
||||||
|
'usage_tips': '',
|
||||||
|
}
|
||||||
|
incomplete_entry = {
|
||||||
|
'file_path': '/models/test2.safetensors',
|
||||||
|
'sha256': 'hash2',
|
||||||
|
# Missing many optional fields (will be repaired)
|
||||||
|
}
|
||||||
|
|
||||||
|
entries = [complete_entry, incomplete_entry]
|
||||||
|
|
||||||
|
report = monitor.check_health(entries, auto_repair=True)
|
||||||
|
|
||||||
|
assert report.status == CacheHealthStatus.HEALTHY
|
||||||
|
assert report.total_entries == 2
|
||||||
|
assert report.valid_entries == 2
|
||||||
|
assert report.invalid_entries == 0
|
||||||
|
assert report.repaired_entries == 1
|
||||||
|
|
||||||
|
def test_should_notify_user_healthy(self):
|
||||||
|
"""Test should_notify_user for healthy cache"""
|
||||||
|
monitor = CacheHealthMonitor()
|
||||||
|
|
||||||
|
report = HealthReport(
|
||||||
|
status=CacheHealthStatus.HEALTHY,
|
||||||
|
total_entries=100,
|
||||||
|
valid_entries=100,
|
||||||
|
invalid_entries=0,
|
||||||
|
repaired_entries=0,
|
||||||
|
message="Cache is healthy"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert monitor.should_notify_user(report) is False
|
||||||
|
|
||||||
|
def test_should_notify_user_degraded(self):
|
||||||
|
"""Test should_notify_user for degraded cache"""
|
||||||
|
monitor = CacheHealthMonitor()
|
||||||
|
|
||||||
|
report = HealthReport(
|
||||||
|
status=CacheHealthStatus.DEGRADED,
|
||||||
|
total_entries=100,
|
||||||
|
valid_entries=98,
|
||||||
|
invalid_entries=2,
|
||||||
|
repaired_entries=0,
|
||||||
|
message="Cache is degraded"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert monitor.should_notify_user(report) is True
|
||||||
|
|
||||||
|
def test_should_notify_user_corrupted(self):
|
||||||
|
"""Test should_notify_user for corrupted cache"""
|
||||||
|
monitor = CacheHealthMonitor()
|
||||||
|
|
||||||
|
report = HealthReport(
|
||||||
|
status=CacheHealthStatus.CORRUPTED,
|
||||||
|
total_entries=100,
|
||||||
|
valid_entries=90,
|
||||||
|
invalid_entries=10,
|
||||||
|
repaired_entries=0,
|
||||||
|
message="Cache is corrupted"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert monitor.should_notify_user(report) is True
|
||||||
|
|
||||||
|
def test_get_notification_severity_degraded(self):
|
||||||
|
"""Test get_notification_severity for degraded cache"""
|
||||||
|
monitor = CacheHealthMonitor()
|
||||||
|
|
||||||
|
report = HealthReport(
|
||||||
|
status=CacheHealthStatus.DEGRADED,
|
||||||
|
total_entries=100,
|
||||||
|
valid_entries=98,
|
||||||
|
invalid_entries=2,
|
||||||
|
repaired_entries=0,
|
||||||
|
message="Cache is degraded"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert monitor.get_notification_severity(report) == 'warning'
|
||||||
|
|
||||||
|
def test_get_notification_severity_corrupted(self):
|
||||||
|
"""Test get_notification_severity for corrupted cache"""
|
||||||
|
monitor = CacheHealthMonitor()
|
||||||
|
|
||||||
|
report = HealthReport(
|
||||||
|
status=CacheHealthStatus.CORRUPTED,
|
||||||
|
total_entries=100,
|
||||||
|
valid_entries=90,
|
||||||
|
invalid_entries=10,
|
||||||
|
repaired_entries=0,
|
||||||
|
message="Cache is corrupted"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert monitor.get_notification_severity(report) == 'error'
|
||||||
|
|
||||||
|
def test_report_to_dict(self):
|
||||||
|
"""Test HealthReport to_dict conversion"""
|
||||||
|
report = HealthReport(
|
||||||
|
status=CacheHealthStatus.DEGRADED,
|
||||||
|
total_entries=100,
|
||||||
|
valid_entries=98,
|
||||||
|
invalid_entries=2,
|
||||||
|
repaired_entries=1,
|
||||||
|
invalid_paths=['/path1', '/path2'],
|
||||||
|
message="Cache issues detected"
|
||||||
|
)
|
||||||
|
|
||||||
|
result = report.to_dict()
|
||||||
|
|
||||||
|
assert result['status'] == 'degraded'
|
||||||
|
assert result['total_entries'] == 100
|
||||||
|
assert result['valid_entries'] == 98
|
||||||
|
assert result['invalid_entries'] == 2
|
||||||
|
assert result['repaired_entries'] == 1
|
||||||
|
assert result['corruption_rate'] == '2.0%'
|
||||||
|
assert len(result['invalid_paths']) == 2
|
||||||
|
assert result['message'] == "Cache issues detected"
|
||||||
|
|
||||||
|
def test_report_corruption_rate_zero_division(self):
|
||||||
|
"""Test corruption_rate calculation with zero entries"""
|
||||||
|
report = HealthReport(
|
||||||
|
status=CacheHealthStatus.HEALTHY,
|
||||||
|
total_entries=0,
|
||||||
|
valid_entries=0,
|
||||||
|
invalid_entries=0,
|
||||||
|
repaired_entries=0,
|
||||||
|
message="Cache is empty"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert report.corruption_rate == 0.0
|
||||||
|
|
||||||
|
def test_check_health_collects_invalid_paths(self):
|
||||||
|
"""Test health check collects invalid entry paths"""
|
||||||
|
monitor = CacheHealthMonitor()
|
||||||
|
|
||||||
|
entries = [
|
||||||
|
{
|
||||||
|
'file_path': '/models/valid.safetensors',
|
||||||
|
'sha256': 'hash1',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'file_path': '/models/invalid1.safetensors',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'file_path': '/models/invalid2.safetensors',
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
report = monitor.check_health(entries, auto_repair=False)
|
||||||
|
|
||||||
|
assert len(report.invalid_paths) == 2
|
||||||
|
assert '/models/invalid1.safetensors' in report.invalid_paths
|
||||||
|
assert '/models/invalid2.safetensors' in report.invalid_paths
|
||||||
|
|
||||||
|
def test_report_to_dict_limits_invalid_paths(self):
|
||||||
|
"""Test that to_dict limits invalid_paths to first 10"""
|
||||||
|
report = HealthReport(
|
||||||
|
status=CacheHealthStatus.CORRUPTED,
|
||||||
|
total_entries=15,
|
||||||
|
valid_entries=0,
|
||||||
|
invalid_entries=15,
|
||||||
|
repaired_entries=0,
|
||||||
|
invalid_paths=[f'/path{i}' for i in range(15)],
|
||||||
|
message="Cache corrupted"
|
||||||
|
)
|
||||||
|
|
||||||
|
result = report.to_dict()
|
||||||
|
|
||||||
|
assert len(result['invalid_paths']) == 10
|
||||||
|
assert result['invalid_paths'][0] == '/path0'
|
||||||
|
assert result['invalid_paths'][-1] == '/path9'
|
||||||
368
tests/services/test_check_pending_models.py
Normal file
368
tests/services/test_check_pending_models.py
Normal file
@@ -0,0 +1,368 @@
|
|||||||
|
"""Tests for the check_pending_models lightweight pre-check functionality."""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
from types import SimpleNamespace
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from py.services.settings_manager import get_settings_manager
|
||||||
|
from py.utils import example_images_download_manager as download_module
|
||||||
|
|
||||||
|
|
||||||
|
class StubScanner:
    """Scanner double returning predetermined cache contents."""

    def __init__(self, models: list[dict]) -> None:
        # Mimic the real scanner's cache object shape: tests only read raw_data.
        self._cache = SimpleNamespace(raw_data=models)

    async def get_cached_data(self):
        # Return the canned cache namespace; async to match the real interface.
        return self._cache
|
||||||
|
|
||||||
|
|
||||||
|
def _patch_scanners(
    monkeypatch: pytest.MonkeyPatch,
    lora_scanner: StubScanner | None = None,
    checkpoint_scanner: StubScanner | None = None,
    embedding_scanner: StubScanner | None = None,
) -> None:
    """Patch ServiceRegistry so each scanner getter yields a stub.

    Any scanner not supplied falls back to a fresh, empty StubScanner.
    """

    def _make_getter(stub: StubScanner | None):
        # Bind the stub at definition time to avoid late-binding surprises.
        async def _getter(cls):
            return stub or StubScanner([])

        return _getter

    registry = download_module.ServiceRegistry
    for attr_name, stub in (
        ("get_lora_scanner", lora_scanner),
        ("get_checkpoint_scanner", checkpoint_scanner),
        ("get_embedding_scanner", embedding_scanner),
    ):
        monkeypatch.setattr(registry, attr_name, classmethod(_make_getter(stub)))
|
||||||
|
|
||||||
|
|
||||||
|
class RecordingWebSocketManager:
    """Collects broadcast payloads for assertions."""

    def __init__(self) -> None:
        # Payloads are appended in broadcast order; tests inspect this list.
        self.payloads: list[dict] = []

    async def broadcast(self, payload: dict) -> None:
        # Capture the payload instead of sending it over a socket.
        self.payloads.append(payload)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
@pytest.mark.usefixtures("tmp_path")
async def test_check_pending_models_returns_zero_when_all_processed(
    monkeypatch: pytest.MonkeyPatch,
    tmp_path,
    settings_manager,
):
    """All models already processed -> zero pending, no download needed."""
    manager = download_module.DownloadManager(ws_manager=RecordingWebSocketManager())
    monkeypatch.setitem(settings_manager.settings, "example_images_path", str(tmp_path))

    done_hashes = ["a" * 64, "b" * 64, "c" * 64]
    library = [
        {"sha256": digest, "model_name": f"Model {idx}"}
        for idx, digest in enumerate(done_hashes)
    ]

    # Progress file marks every model as processed.
    (tmp_path / ".download_progress.json").write_text(
        json.dumps({"processed_models": done_hashes, "failed_models": []}),
        encoding="utf-8",
    )

    # Each model also has a populated example-image directory on disk.
    for digest in done_hashes:
        target = tmp_path / digest
        target.mkdir()
        (target / "image_0.png").write_text("data")

    _patch_scanners(monkeypatch, lora_scanner=StubScanner(library))

    result = await manager.check_pending_models(["lora"])

    assert result["success"] is True
    assert result["is_downloading"] is False
    assert result["total_models"] == 3
    assert result["pending_count"] == 0
    assert result["processed_count"] == 3
    assert result["needs_download"] is False
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
@pytest.mark.usefixtures("tmp_path")
async def test_check_pending_models_finds_unprocessed_models(
    monkeypatch: pytest.MonkeyPatch,
    tmp_path,
    settings_manager,
):
    """One processed and one unprocessed model -> exactly one pending."""
    manager = download_module.DownloadManager(ws_manager=RecordingWebSocketManager())
    monkeypatch.setitem(settings_manager.settings, "example_images_path", str(tmp_path))

    done_hash, todo_hash = "a" * 64, "b" * 64
    library = [
        {"sha256": done_hash, "model_name": "Processed Model"},
        {"sha256": todo_hash, "model_name": "Unprocessed Model"},
    ]

    # Only the first model is recorded as processed.
    (tmp_path / ".download_progress.json").write_text(
        json.dumps({"processed_models": [done_hash], "failed_models": []}),
        encoding="utf-8",
    )

    # Matching on-disk directory exists solely for the processed model.
    done_dir = tmp_path / done_hash
    done_dir.mkdir()
    (done_dir / "image_0.png").write_text("data")

    _patch_scanners(monkeypatch, lora_scanner=StubScanner(library))

    result = await manager.check_pending_models(["lora"])

    assert result["success"] is True
    assert result["total_models"] == 2
    assert result["pending_count"] == 1
    assert result["processed_count"] == 1
    assert result["needs_download"] is True
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
@pytest.mark.usefixtures("tmp_path")
async def test_check_pending_models_skips_models_without_hash(
    monkeypatch: pytest.MonkeyPatch,
    tmp_path,
    settings_manager,
):
    """Models lacking a sha256 hash never count toward the pending total."""
    manager = download_module.DownloadManager(ws_manager=RecordingWebSocketManager())
    monkeypatch.setitem(settings_manager.settings, "example_images_path", str(tmp_path))

    library = [
        {"sha256": "a" * 64, "model_name": "Hashed Model"},
        {"sha256": None, "model_name": "No Hash Model"},
        {"model_name": "Missing Hash Model"},  # sha256 key absent entirely
    ]
    _patch_scanners(monkeypatch, lora_scanner=StubScanner(library))

    result = await manager.check_pending_models(["lora"])

    assert result["success"] is True
    assert result["total_models"] == 3
    assert result["pending_count"] == 1  # only the hashed model is pending
    assert result["needs_download"] is True
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
@pytest.mark.usefixtures("tmp_path")
async def test_check_pending_models_handles_multiple_model_types(
    monkeypatch: pytest.MonkeyPatch,
    tmp_path,
    settings_manager,
):
    """Pending counts are summed across lora, checkpoint and embedding scans."""
    manager = download_module.DownloadManager(ws_manager=RecordingWebSocketManager())
    monkeypatch.setitem(settings_manager.settings, "example_images_path", str(tmp_path))

    _patch_scanners(
        monkeypatch,
        lora_scanner=StubScanner([
            {"sha256": "a" * 64, "model_name": "Lora 1"},
            {"sha256": "b" * 64, "model_name": "Lora 2"},
        ]),
        checkpoint_scanner=StubScanner([
            {"sha256": "c" * 64, "model_name": "Checkpoint 1"},
        ]),
        embedding_scanner=StubScanner([
            {"sha256": "d" * 64, "model_name": "Embedding 1"},
            {"sha256": "e" * 64, "model_name": "Embedding 2"},
            {"sha256": "f" * 64, "model_name": "Embedding 3"},
        ]),
    )

    result = await manager.check_pending_models(["lora", "checkpoint", "embedding"])

    assert result["success"] is True
    assert result["total_models"] == 6  # 2 + 1 + 3
    assert result["pending_count"] == 6  # nothing processed yet
    assert result["needs_download"] is True
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
@pytest.mark.usefixtures("tmp_path")
async def test_check_pending_models_returns_error_when_download_in_progress(
    monkeypatch: pytest.MonkeyPatch,
    tmp_path,
    settings_manager,
):
    """A running download short-circuits the check with an informative reply."""
    manager = download_module.DownloadManager(ws_manager=RecordingWebSocketManager())
    monkeypatch.setitem(settings_manager.settings, "example_images_path", str(tmp_path))

    # Flip the internal flag to simulate an active download.
    manager._is_downloading = True

    result = await manager.check_pending_models(["lora"])

    assert result["success"] is True
    assert result["is_downloading"] is True
    assert result["needs_download"] is False
    assert result["pending_count"] == 0
    assert "already in progress" in result["message"].lower()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
@pytest.mark.usefixtures("tmp_path")
async def test_check_pending_models_handles_empty_library(
    monkeypatch: pytest.MonkeyPatch,
    tmp_path,
    settings_manager,
):
    """An empty model library yields all-zero counts and no download."""
    manager = download_module.DownloadManager(ws_manager=RecordingWebSocketManager())
    monkeypatch.setitem(settings_manager.settings, "example_images_path", str(tmp_path))

    _patch_scanners(monkeypatch, lora_scanner=StubScanner([]))

    result = await manager.check_pending_models(["lora"])

    assert result["success"] is True
    assert result["total_models"] == 0
    assert result["pending_count"] == 0
    assert result["processed_count"] == 0
    assert result["needs_download"] is False
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
@pytest.mark.usefixtures("tmp_path")
async def test_check_pending_models_reads_failed_models(
    monkeypatch: pytest.MonkeyPatch,
    tmp_path,
    settings_manager,
):
    """failed_count mirrors the failed_models list in the progress file."""
    manager = download_module.DownloadManager(ws_manager=RecordingWebSocketManager())
    monkeypatch.setitem(settings_manager.settings, "example_images_path", str(tmp_path))

    # Record two failures in the progress file.
    (tmp_path / ".download_progress.json").write_text(
        json.dumps({"processed_models": [], "failed_models": ["a" * 64, "b" * 64]}),
        encoding="utf-8",
    )
    _patch_scanners(
        monkeypatch,
        lora_scanner=StubScanner([{"sha256": "a" * 64, "model_name": "Model"}]),
    )

    result = await manager.check_pending_models(["lora"])

    assert result["success"] is True
    assert result["failed_count"] == 2
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
@pytest.mark.usefixtures("tmp_path")
async def test_check_pending_models_handles_missing_progress_file(
    monkeypatch: pytest.MonkeyPatch,
    tmp_path,
    settings_manager,
):
    """With no progress file, every hashed model is treated as pending."""
    manager = download_module.DownloadManager(ws_manager=RecordingWebSocketManager())
    monkeypatch.setitem(settings_manager.settings, "example_images_path", str(tmp_path))

    _patch_scanners(
        monkeypatch,
        lora_scanner=StubScanner([
            {"sha256": "a" * 64, "model_name": "Model 1"},
            {"sha256": "b" * 64, "model_name": "Model 2"},
        ]),
    )

    # Deliberately no .download_progress.json written to disk.
    result = await manager.check_pending_models(["lora"])

    assert result["success"] is True
    assert result["total_models"] == 2
    assert result["pending_count"] == 2  # all pending without recorded progress
    assert result["processed_count"] == 0
    assert result["failed_count"] == 0
    assert result["needs_download"] is True
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
@pytest.mark.usefixtures("tmp_path")
async def test_check_pending_models_handles_corrupted_progress_file(
    monkeypatch: pytest.MonkeyPatch,
    tmp_path,
    settings_manager,
):
    """A malformed progress file is tolerated; all models count as pending."""
    manager = download_module.DownloadManager(ws_manager=RecordingWebSocketManager())
    monkeypatch.setitem(settings_manager.settings, "example_images_path", str(tmp_path))

    # Write garbage where JSON is expected.
    (tmp_path / ".download_progress.json").write_text(
        "not valid json", encoding="utf-8"
    )
    _patch_scanners(
        monkeypatch,
        lora_scanner=StubScanner([{"sha256": "a" * 64, "model_name": "Model"}]),
    )

    result = await manager.check_pending_models(["lora"])

    # Should still succeed, treating every model as unprocessed.
    assert result["success"] is True
    assert result["total_models"] == 1
    assert result["pending_count"] == 1
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
def settings_manager():
    # Expose the process-wide settings manager so tests can patch its settings dict.
    return get_settings_manager()
|
||||||
110
tests/services/test_duplicate_detection.py
Normal file
110
tests/services/test_duplicate_detection.py
Normal file
@@ -0,0 +1,110 @@
|
|||||||
|
"""Test for duplicate detection by source URL."""
|
||||||
|
import pytest
|
||||||
|
from unittest.mock import AsyncMock, MagicMock
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_find_duplicate_recipes_by_source():
    """Recipes sharing one source URL are grouped together as duplicates."""
    from py.services.recipe_scanner import RecipeScanner

    scanner = MagicMock(spec=RecipeScanner)
    cache = MagicMock()
    cache.raw_data = [
        {
            'id': '8705c972-ef08-47f3-8ac3-9ac3b8ff4c0b',
            'source_path': 'https://civitai.com/images/119165946',
            'title': 'Recipe 1',
        },
        {
            'id': '52e636ce-ea9f-4f64-a6a9-c704bd715889',
            'source_path': 'https://civitai.com/images/119165946',
            'title': 'Recipe 2',
        },
        {
            'id': '00000000-0000-0000-0000-000000000001',
            'source_path': 'https://civitai.com/images/999999999',
            'title': 'Recipe 3',
        },
        {
            'id': '00000000-0000-0000-0000-000000000002',
            'source_path': '',
            'title': 'Recipe 4 (no source)',
        },
    ]
    scanner.get_cached_data = AsyncMock(return_value=cache)

    # Invoke the real implementation against the mocked instance.
    result = await RecipeScanner.find_duplicate_recipes_by_source(scanner)

    dup_url = 'https://civitai.com/images/119165946'
    assert len(result) == 1
    assert dup_url in result
    assert len(result[dup_url]) == 2
    assert '8705c972-ef08-47f3-8ac3-9ac3b8ff4c0b' in result[dup_url]
    assert '52e636ce-ea9f-4f64-a6a9-c704bd715889' in result[dup_url]
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_find_duplicate_recipes_by_source_empty():
    """No shared source URLs -> an empty duplicates mapping."""
    from py.services.recipe_scanner import RecipeScanner

    scanner = MagicMock(spec=RecipeScanner)
    cache = MagicMock()
    cache.raw_data = [
        {
            'id': '8705c972-ef08-47f3-8ac3-9ac3b8ff4c0b',
            'source_path': 'https://civitai.com/images/119165946',
            'title': 'Recipe 1',
        },
        {
            'id': '00000000-0000-0000-0000-000000000002',
            'source_path': '',
            'title': 'Recipe 2 (no source)',
        },
    ]
    scanner.get_cached_data = AsyncMock(return_value=cache)

    # Invoke the real implementation against the mocked instance.
    result = await RecipeScanner.find_duplicate_recipes_by_source(scanner)

    assert len(result) == 0
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_find_duplicate_recipes_by_source_trimming_whitespace():
    """Source URLs are compared after stripping surrounding whitespace."""
    from py.services.recipe_scanner import RecipeScanner

    scanner = MagicMock(spec=RecipeScanner)
    cache = MagicMock()
    cache.raw_data = [
        {
            'id': '8705c972-ef08-47f3-8ac3-9ac3b8ff4c0b',
            'source_path': 'https://civitai.com/images/119165946',
            'title': 'Recipe 1',
        },
        {
            'id': '52e636ce-ea9f-4f64-a6a9-c704bd715889',
            'source_path': ' https://civitai.com/images/119165946 ',
            'title': 'Recipe 2',
        },
    ]
    scanner.get_cached_data = AsyncMock(return_value=cache)

    # Invoke the real implementation against the mocked instance.
    result = await RecipeScanner.find_duplicate_recipes_by_source(scanner)

    dup_url = 'https://civitai.com/images/119165946'
    assert len(result) == 1
    assert dup_url in result
    assert len(result[dup_url]) == 2
|
||||||
@@ -482,6 +482,81 @@ async def test_relink_metadata_raises_when_version_missing():
|
|||||||
model_version_id=None,
|
model_version_id=None,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_fetch_and_update_model_persists_db_checked_when_sqlite_fails(tmp_path):
    """
    Regression test: When a deleted model is checked against sqlite and not found,
    db_checked=True must be persisted to disk so the model is skipped in future refreshes.

    Previously, db_checked was set in memory but never saved because the save_metadata
    call was inside the `if civitai_api_not_found:` block, which is False for deleted
    models (since the default CivitAI API is never tried).
    """
    # Provider doubles: the default CivitAI provider is expected never to be
    # consulted for a deleted model; both archive providers report "not found".
    default_provider = SimpleNamespace(
        get_model_by_hash=AsyncMock(),
        get_model_version=AsyncMock(),
    )
    civarchive_provider = SimpleNamespace(
        get_model_by_hash=AsyncMock(return_value=(None, "Model not found")),
        get_model_version=AsyncMock(),
    )
    sqlite_provider = SimpleNamespace(
        get_model_by_hash=AsyncMock(return_value=(None, "Model not found")),
        get_model_version=AsyncMock(),
    )

    async def select_provider(name: str):
        # Route by provider name; anything unrecognized falls back to the default.
        if name == "civarchive_api":
            return civarchive_provider
        if name == "sqlite":
            return sqlite_provider
        return default_provider

    provider_selector = AsyncMock(side_effect=select_provider)
    # build_service is a shared test helper (defined earlier in this file) that
    # wires the service with the given settings and provider doubles.
    helpers = build_service(
        settings_values={"enable_metadata_archive_db": True},
        default_provider=default_provider,
        provider_selector=provider_selector,
    )

    model_path = tmp_path / "model.safetensors"
    # A model previously marked as deleted on CivitAI and not yet checked
    # against the archive DB.
    model_data = {
        "civitai_deleted": True,
        "db_checked": False,
        "from_civitai": False,
        "file_path": str(model_path),
        "model_name": "Deleted Model",
    }
    update_cache = AsyncMock()

    ok, error = await helpers.service.fetch_and_update_model(
        sha256="deadbeef",
        file_path=str(model_path),
        model_data=model_data,
        update_cache_func=update_cache,
    )

    # The call should fail because neither provider found metadata
    assert not ok
    assert error is not None
    assert "Model not found" in error or "not found in metadata archive DB" in error

    # Both providers should have been tried
    assert civarchive_provider.get_model_by_hash.await_count == 1
    assert sqlite_provider.get_model_by_hash.await_count == 1

    # db_checked should be True in memory
    assert model_data["db_checked"] is True

    # CRITICAL: metadata should have been saved to disk with db_checked=True
    helpers.metadata_manager.save_metadata.assert_awaited_once()
    saved_call = helpers.metadata_manager.save_metadata.await_args
    saved_data = saved_call.args[1]
    assert saved_data["db_checked"] is True
    assert "folder" not in saved_data  # folder should be stripped
|
||||||
|
assert "last_checked_at" in saved_data # timestamp should be set
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_fetch_and_update_model_does_not_overwrite_api_metadata_with_archive(tmp_path):
|
async def test_fetch_and_update_model_does_not_overwrite_api_metadata_with_archive(tmp_path):
|
||||||
helpers = build_service()
|
helpers = build_service()
|
||||||
|
|||||||
167
tests/services/test_model_scanner_cache_validation.py
Normal file
167
tests/services/test_model_scanner_cache_validation.py
Normal file
@@ -0,0 +1,167 @@
|
|||||||
|
"""
|
||||||
|
Integration tests for cache validation in ModelScanner
|
||||||
|
"""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import asyncio
|
||||||
|
|
||||||
|
from py.services.model_scanner import ModelScanner
|
||||||
|
from py.services.cache_entry_validator import CacheEntryValidator
|
||||||
|
from py.services.cache_health_monitor import CacheHealthMonitor, CacheHealthStatus
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_model_scanner_validates_cache_entries(tmp_path_factory):
|
||||||
|
"""Test that ModelScanner validates cache entries during initialization"""
|
||||||
|
# Create temporary test data
|
||||||
|
tmp_dir = tmp_path_factory.mktemp("test_loras")
|
||||||
|
|
||||||
|
# Create test files
|
||||||
|
test_file = tmp_dir / "test_model.safetensors"
|
||||||
|
test_file.write_bytes(b"fake model data" * 100)
|
||||||
|
|
||||||
|
# Mock model scanner (we can't easily instantiate a full scanner in tests)
|
||||||
|
# Instead, test the validation logic directly
|
||||||
|
entries = [
|
||||||
|
{
|
||||||
|
'file_path': str(test_file),
|
||||||
|
'sha256': 'abc123def456',
|
||||||
|
'file_name': 'test_model.safetensors',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'file_path': str(tmp_dir / 'invalid.safetensors'),
|
||||||
|
# Missing sha256 - invalid
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
valid, invalid = CacheEntryValidator.validate_batch(entries, auto_repair=True)
|
||||||
|
|
||||||
|
assert len(valid) == 1
|
||||||
|
assert len(invalid) == 1
|
||||||
|
assert valid[0]['sha256'] == 'abc123def456'
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_model_scanner_detects_degraded_cache():
|
||||||
|
"""Test that ModelScanner detects degraded cache health"""
|
||||||
|
# Create 100 entries with 2% corruption
|
||||||
|
entries = [
|
||||||
|
{
|
||||||
|
'file_path': f'/models/test{i}.safetensors',
|
||||||
|
'sha256': f'hash{i}',
|
||||||
|
}
|
||||||
|
for i in range(98)
|
||||||
|
]
|
||||||
|
# Add 2 invalid entries
|
||||||
|
entries.append({'file_path': '/models/invalid1.safetensors'})
|
||||||
|
entries.append({'file_path': '/models/invalid2.safetensors'})
|
||||||
|
|
||||||
|
monitor = CacheHealthMonitor()
|
||||||
|
report = monitor.check_health(entries, auto_repair=True)
|
||||||
|
|
||||||
|
assert report.status == CacheHealthStatus.DEGRADED
|
||||||
|
assert report.invalid_entries == 2
|
||||||
|
assert report.valid_entries == 98
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_model_scanner_detects_corrupted_cache():
|
||||||
|
"""Test that ModelScanner detects corrupted cache health"""
|
||||||
|
# Create 100 entries with 10% corruption
|
||||||
|
entries = [
|
||||||
|
{
|
||||||
|
'file_path': f'/models/test{i}.safetensors',
|
||||||
|
'sha256': f'hash{i}',
|
||||||
|
}
|
||||||
|
for i in range(90)
|
||||||
|
]
|
||||||
|
# Add 10 invalid entries
|
||||||
|
for i in range(10):
|
||||||
|
entries.append({'file_path': f'/models/invalid{i}.safetensors'})
|
||||||
|
|
||||||
|
monitor = CacheHealthMonitor()
|
||||||
|
report = monitor.check_health(entries, auto_repair=True)
|
||||||
|
|
||||||
|
assert report.status == CacheHealthStatus.CORRUPTED
|
||||||
|
assert report.invalid_entries == 10
|
||||||
|
assert report.valid_entries == 90
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_model_scanner_removes_invalid_from_hash_index():
|
||||||
|
"""Test that ModelScanner removes invalid entries from hash index"""
|
||||||
|
from py.services.model_hash_index import ModelHashIndex
|
||||||
|
|
||||||
|
# Create a hash index with some entries
|
||||||
|
hash_index = ModelHashIndex()
|
||||||
|
valid_entry = {
|
||||||
|
'file_path': '/models/valid.safetensors',
|
||||||
|
'sha256': 'abc123',
|
||||||
|
}
|
||||||
|
invalid_entry = {
|
||||||
|
'file_path': '/models/invalid.safetensors',
|
||||||
|
'sha256': '', # Empty sha256
|
||||||
|
}
|
||||||
|
|
||||||
|
# Add entries to hash index
|
||||||
|
hash_index.add_entry(valid_entry['sha256'], valid_entry['file_path'])
|
||||||
|
hash_index.add_entry(invalid_entry['sha256'], invalid_entry['file_path'])
|
||||||
|
|
||||||
|
# Verify both entries are in the index (using get_hash method)
|
||||||
|
assert hash_index.get_hash(valid_entry['file_path']) == valid_entry['sha256']
|
||||||
|
# Invalid entry won't be added due to empty sha256
|
||||||
|
assert hash_index.get_hash(invalid_entry['file_path']) is None
|
||||||
|
|
||||||
|
# Simulate removing invalid entry (it's not actually there, but let's test the method)
|
||||||
|
hash_index.remove_by_path(
|
||||||
|
CacheEntryValidator.get_file_path_safe(invalid_entry),
|
||||||
|
CacheEntryValidator.get_sha256_safe(invalid_entry)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Verify valid entry remains
|
||||||
|
assert hash_index.get_hash(valid_entry['file_path']) == valid_entry['sha256']
|
||||||
|
|
||||||
|
|
||||||
|
def test_cache_entry_validator_handles_various_field_types():
|
||||||
|
"""Test that validator handles various field types correctly"""
|
||||||
|
# Test with different field types
|
||||||
|
entry = {
|
||||||
|
'file_path': '/models/test.safetensors',
|
||||||
|
'sha256': 'abc123',
|
||||||
|
'size': 1024, # int
|
||||||
|
'modified': 1234567890.0, # float
|
||||||
|
'favorite': True, # bool
|
||||||
|
'tags': ['tag1', 'tag2'], # list
|
||||||
|
'exclude': False, # bool
|
||||||
|
}
|
||||||
|
|
||||||
|
result = CacheEntryValidator.validate(entry, auto_repair=False)
|
||||||
|
|
||||||
|
assert result.is_valid is True
|
||||||
|
assert result.repaired is False
|
||||||
|
|
||||||
|
|
||||||
|
def test_cache_health_report_serialization():
|
||||||
|
"""Test that HealthReport can be serialized to dict"""
|
||||||
|
from py.services.cache_health_monitor import HealthReport
|
||||||
|
|
||||||
|
report = HealthReport(
|
||||||
|
status=CacheHealthStatus.DEGRADED,
|
||||||
|
total_entries=100,
|
||||||
|
valid_entries=98,
|
||||||
|
invalid_entries=2,
|
||||||
|
repaired_entries=1,
|
||||||
|
invalid_paths=['/path1', '/path2'],
|
||||||
|
message="Cache issues detected"
|
||||||
|
)
|
||||||
|
|
||||||
|
result = report.to_dict()
|
||||||
|
|
||||||
|
assert result['status'] == 'degraded'
|
||||||
|
assert result['total_entries'] == 100
|
||||||
|
assert result['valid_entries'] == 98
|
||||||
|
assert result['invalid_entries'] == 2
|
||||||
|
assert result['repaired_entries'] == 1
|
||||||
|
assert result['corruption_rate'] == '2.0%'
|
||||||
|
assert len(result['invalid_paths']) == 2
|
||||||
|
assert result['message'] == "Cache issues detected"
|
||||||
276
tests/services/test_tag_logic_filter.py
Normal file
276
tests/services/test_tag_logic_filter.py
Normal file
@@ -0,0 +1,276 @@
|
|||||||
|
"""Tests for tag logic (OR/AND) filtering functionality."""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from py.services.model_query import ModelFilterSet, FilterCriteria
|
||||||
|
|
||||||
|
|
||||||
|
class StubSettings:
|
||||||
|
def get(self, key, default=None):
|
||||||
|
return default
|
||||||
|
|
||||||
|
|
||||||
|
class TestTagLogicFilter:
|
||||||
|
"""Test cases for tag_logic parameter in FilterCriteria."""
|
||||||
|
|
||||||
|
def test_tag_logic_any_returns_items_with_any_tag(self):
|
||||||
|
"""Test that tag_logic='any' (OR) returns items matching any include tag."""
|
||||||
|
filter_set = ModelFilterSet(StubSettings())
|
||||||
|
data = [
|
||||||
|
{"name": "m1", "tags": ["anime"]},
|
||||||
|
{"name": "m2", "tags": ["realistic"]},
|
||||||
|
{"name": "m3", "tags": ["anime", "realistic"]},
|
||||||
|
{"name": "m4", "tags": ["style"]},
|
||||||
|
{"name": "m5", "tags": []},
|
||||||
|
]
|
||||||
|
|
||||||
|
# Include anime OR realistic (should match m1, m2, m3)
|
||||||
|
criteria = FilterCriteria(
|
||||||
|
tags={"anime": "include", "realistic": "include"},
|
||||||
|
tag_logic="any"
|
||||||
|
)
|
||||||
|
result = filter_set.apply(data, criteria)
|
||||||
|
assert len(result) == 3
|
||||||
|
assert {item["name"] for item in result} == {"m1", "m2", "m3"}
|
||||||
|
|
||||||
|
def test_tag_logic_all_returns_items_with_all_tags(self):
|
||||||
|
"""Test that tag_logic='all' (AND) returns only items matching all include tags."""
|
||||||
|
filter_set = ModelFilterSet(StubSettings())
|
||||||
|
data = [
|
||||||
|
{"name": "m1", "tags": ["anime"]},
|
||||||
|
{"name": "m2", "tags": ["realistic"]},
|
||||||
|
{"name": "m3", "tags": ["anime", "realistic"]},
|
||||||
|
{"name": "m4", "tags": ["style"]},
|
||||||
|
{"name": "m5", "tags": []},
|
||||||
|
]
|
||||||
|
|
||||||
|
# Include anime AND realistic (should match only m3)
|
||||||
|
criteria = FilterCriteria(
|
||||||
|
tags={"anime": "include", "realistic": "include"},
|
||||||
|
tag_logic="all"
|
||||||
|
)
|
||||||
|
result = filter_set.apply(data, criteria)
|
||||||
|
assert len(result) == 1
|
||||||
|
assert result[0]["name"] == "m3"
|
||||||
|
|
||||||
|
def test_tag_logic_all_with_single_tag(self):
|
||||||
|
"""Test that tag_logic='all' with single tag works same as 'any'."""
|
||||||
|
filter_set = ModelFilterSet(StubSettings())
|
||||||
|
data = [
|
||||||
|
{"name": "m1", "tags": ["anime"]},
|
||||||
|
{"name": "m2", "tags": ["realistic"]},
|
||||||
|
{"name": "m3", "tags": ["anime", "realistic"]},
|
||||||
|
]
|
||||||
|
|
||||||
|
# Include only anime with 'all' logic
|
||||||
|
criteria = FilterCriteria(
|
||||||
|
tags={"anime": "include"},
|
||||||
|
tag_logic="all"
|
||||||
|
)
|
||||||
|
result = filter_set.apply(data, criteria)
|
||||||
|
assert len(result) == 2
|
||||||
|
assert {item["name"] for item in result} == {"m1", "m3"}
|
||||||
|
|
||||||
|
def test_tag_logic_any_with_exclude_tags(self):
|
||||||
|
"""Test that tag_logic='any' works correctly with exclude tags."""
|
||||||
|
filter_set = ModelFilterSet(StubSettings())
|
||||||
|
data = [
|
||||||
|
{"name": "m1", "tags": ["anime"]},
|
||||||
|
{"name": "m2", "tags": ["realistic"]},
|
||||||
|
{"name": "m3", "tags": ["anime", "realistic"]},
|
||||||
|
{"name": "m4", "tags": ["nsfw"]},
|
||||||
|
{"name": "m5", "tags": ["anime", "nsfw"]},
|
||||||
|
]
|
||||||
|
|
||||||
|
# Include anime OR realistic, exclude nsfw
|
||||||
|
criteria = FilterCriteria(
|
||||||
|
tags={
|
||||||
|
"anime": "include",
|
||||||
|
"realistic": "include",
|
||||||
|
"nsfw": "exclude"
|
||||||
|
},
|
||||||
|
tag_logic="any"
|
||||||
|
)
|
||||||
|
result = filter_set.apply(data, criteria)
|
||||||
|
# Should match m1 (anime), m2 (realistic), m3 (both)
|
||||||
|
# m4 excluded by nsfw, m5 excluded by nsfw
|
||||||
|
assert len(result) == 3
|
||||||
|
assert {item["name"] for item in result} == {"m1", "m2", "m3"}
|
||||||
|
|
||||||
|
def test_tag_logic_all_with_exclude_tags(self):
|
||||||
|
"""Test that tag_logic='all' works correctly with exclude tags."""
|
||||||
|
filter_set = ModelFilterSet(StubSettings())
|
||||||
|
data = [
|
||||||
|
{"name": "m1", "tags": ["anime", "character"]},
|
||||||
|
{"name": "m2", "tags": ["realistic", "character"]},
|
||||||
|
{"name": "m3", "tags": ["anime", "realistic", "character"]},
|
||||||
|
{"name": "m4", "tags": ["anime", "character", "nsfw"]},
|
||||||
|
]
|
||||||
|
|
||||||
|
# Include anime AND character, exclude nsfw
|
||||||
|
criteria = FilterCriteria(
|
||||||
|
tags={
|
||||||
|
"anime": "include",
|
||||||
|
"character": "include",
|
||||||
|
"nsfw": "exclude"
|
||||||
|
},
|
||||||
|
tag_logic="all"
|
||||||
|
)
|
||||||
|
result = filter_set.apply(data, criteria)
|
||||||
|
# m1: has anime+character, no nsfw ✓
|
||||||
|
# m2: missing anime ✗
|
||||||
|
# m3: has anime+character, no nsfw ✓
|
||||||
|
# m4: has anime+character but also nsfw ✗
|
||||||
|
assert len(result) == 2
|
||||||
|
assert {item["name"] for item in result} == {"m1", "m3"}
|
||||||
|
|
||||||
|
def test_tag_logic_all_with_no_tags_special_case(self):
|
||||||
|
"""Test tag_logic='all' with __no_tags__ special tag.
|
||||||
|
|
||||||
|
When __no_tags__ is used with 'all' logic along with regular tags,
|
||||||
|
the behavior is: items with no tags are returned (since they satisfy
|
||||||
|
__no_tags__), OR items that have all the regular tags.
|
||||||
|
This is because __no_tags__ is a special condition that can't be ANDed
|
||||||
|
with regular tags in a meaningful way.
|
||||||
|
"""
|
||||||
|
filter_set = ModelFilterSet(StubSettings())
|
||||||
|
data = [
|
||||||
|
{"name": "m1", "tags": ["anime"]},
|
||||||
|
{"name": "m2", "tags": []},
|
||||||
|
{"name": "m3", "tags": None},
|
||||||
|
{"name": "m4", "tags": ["anime", "character"]},
|
||||||
|
]
|
||||||
|
|
||||||
|
# Include anime AND __no_tags__ with 'all' logic
|
||||||
|
# Implementation treats this as: no tags OR (all regular tags)
|
||||||
|
criteria = FilterCriteria(
|
||||||
|
tags={"anime": "include", "__no_tags__": "include"},
|
||||||
|
tag_logic="all"
|
||||||
|
)
|
||||||
|
result = filter_set.apply(data, criteria)
|
||||||
|
# Items with no tags: m2, m3
|
||||||
|
# Items with all regular tags (anime): m1, m4
|
||||||
|
# Combined: m1, m2, m3, m4 (all items)
|
||||||
|
assert len(result) == 4
|
||||||
|
|
||||||
|
def test_tag_logic_any_with_no_tags_special_case(self):
|
||||||
|
"""Test tag_logic='any' with __no_tags__ special tag."""
|
||||||
|
filter_set = ModelFilterSet(StubSettings())
|
||||||
|
data = [
|
||||||
|
{"name": "m1", "tags": ["anime"]},
|
||||||
|
{"name": "m2", "tags": []},
|
||||||
|
{"name": "m3", "tags": None},
|
||||||
|
{"name": "m4", "tags": ["realistic"]},
|
||||||
|
]
|
||||||
|
|
||||||
|
# Include anime OR __no_tags__
|
||||||
|
criteria = FilterCriteria(
|
||||||
|
tags={"anime": "include", "__no_tags__": "include"},
|
||||||
|
tag_logic="any"
|
||||||
|
)
|
||||||
|
result = filter_set.apply(data, criteria)
|
||||||
|
# Should match m1 (anime), m2 (no tags), m3 (no tags)
|
||||||
|
assert len(result) == 3
|
||||||
|
assert {item["name"] for item in result} == {"m1", "m2", "m3"}
|
||||||
|
|
||||||
|
def test_tag_logic_default_is_any(self):
|
||||||
|
"""Test that default tag_logic is 'any' when not specified."""
|
||||||
|
filter_set = ModelFilterSet(StubSettings())
|
||||||
|
data = [
|
||||||
|
{"name": "m1", "tags": ["anime"]},
|
||||||
|
{"name": "m2", "tags": ["realistic"]},
|
||||||
|
{"name": "m3", "tags": ["anime", "realistic"]},
|
||||||
|
]
|
||||||
|
|
||||||
|
# Not specifying tag_logic should default to 'any'
|
||||||
|
criteria = FilterCriteria(
|
||||||
|
tags={"anime": "include", "realistic": "include"}
|
||||||
|
)
|
||||||
|
result = filter_set.apply(data, criteria)
|
||||||
|
# Should match m1, m2, m3 (OR behavior)
|
||||||
|
assert len(result) == 3
|
||||||
|
assert {item["name"] for item in result} == {"m1", "m2", "m3"}
|
||||||
|
|
||||||
|
def test_tag_logic_case_insensitive(self):
|
||||||
|
"""Test that tag_logic values are case insensitive."""
|
||||||
|
filter_set = ModelFilterSet(StubSettings())
|
||||||
|
data = [
|
||||||
|
{"name": "m1", "tags": ["anime"]},
|
||||||
|
{"name": "m2", "tags": ["realistic"]},
|
||||||
|
{"name": "m3", "tags": ["anime", "realistic"]},
|
||||||
|
]
|
||||||
|
|
||||||
|
# Test uppercase 'ALL'
|
||||||
|
criteria = FilterCriteria(
|
||||||
|
tags={"anime": "include", "realistic": "include"},
|
||||||
|
tag_logic="ALL"
|
||||||
|
)
|
||||||
|
result = filter_set.apply(data, criteria)
|
||||||
|
assert len(result) == 1
|
||||||
|
assert result[0]["name"] == "m3"
|
||||||
|
|
||||||
|
# Test mixed case 'Any'
|
||||||
|
criteria = FilterCriteria(
|
||||||
|
tags={"anime": "include", "realistic": "include"},
|
||||||
|
tag_logic="Any"
|
||||||
|
)
|
||||||
|
result = filter_set.apply(data, criteria)
|
||||||
|
assert len(result) == 3
|
||||||
|
|
||||||
|
def test_tag_logic_all_with_three_tags(self):
|
||||||
|
"""Test tag_logic='all' with three include tags."""
|
||||||
|
filter_set = ModelFilterSet(StubSettings())
|
||||||
|
data = [
|
||||||
|
{"name": "m1", "tags": ["anime"]},
|
||||||
|
{"name": "m2", "tags": ["anime", "character"]},
|
||||||
|
{"name": "m3", "tags": ["anime", "character", "style"]},
|
||||||
|
{"name": "m4", "tags": ["character", "style"]},
|
||||||
|
]
|
||||||
|
|
||||||
|
# Include anime AND character AND style
|
||||||
|
criteria = FilterCriteria(
|
||||||
|
tags={
|
||||||
|
"anime": "include",
|
||||||
|
"character": "include",
|
||||||
|
"style": "include"
|
||||||
|
},
|
||||||
|
tag_logic="all"
|
||||||
|
)
|
||||||
|
result = filter_set.apply(data, criteria)
|
||||||
|
# Only m3 has all three tags
|
||||||
|
assert len(result) == 1
|
||||||
|
assert result[0]["name"] == "m3"
|
||||||
|
|
||||||
|
def test_tag_logic_empty_include_tags(self):
|
||||||
|
"""Test that empty include tags with any logic returns all items."""
|
||||||
|
filter_set = ModelFilterSet(StubSettings())
|
||||||
|
data = [
|
||||||
|
{"name": "m1", "tags": ["anime"]},
|
||||||
|
{"name": "m2", "tags": ["realistic"]},
|
||||||
|
]
|
||||||
|
|
||||||
|
# Only exclude tags, no include tags
|
||||||
|
criteria = FilterCriteria(
|
||||||
|
tags={"nsfw": "exclude"},
|
||||||
|
tag_logic="all"
|
||||||
|
)
|
||||||
|
result = filter_set.apply(data, criteria)
|
||||||
|
# Both should match since no include filters
|
||||||
|
assert len(result) == 2
|
||||||
|
|
||||||
|
def test_tag_logic_with_none_tags_field(self):
|
||||||
|
"""Test tag_logic handles items with None tags field."""
|
||||||
|
filter_set = ModelFilterSet(StubSettings())
|
||||||
|
data = [
|
||||||
|
{"name": "m1", "tags": ["anime", "realistic"]},
|
||||||
|
{"name": "m2", "tags": None},
|
||||||
|
{"name": "m3", "tags": ["anime"]},
|
||||||
|
]
|
||||||
|
|
||||||
|
criteria = FilterCriteria(
|
||||||
|
tags={"anime": "include", "realistic": "include"},
|
||||||
|
tag_logic="all"
|
||||||
|
)
|
||||||
|
result = filter_set.apply(data, criteria)
|
||||||
|
# Only m1 has both anime and realistic
|
||||||
|
assert len(result) == 1
|
||||||
|
assert result[0]["name"] == "m1"
|
||||||
@@ -242,6 +242,148 @@ async def test_bulk_metadata_refresh_reports_errors() -> None:
|
|||||||
assert progress.events[-1]["error"] == "boom"
|
assert progress.events[-1]["error"] == "boom"
|
||||||
|
|
||||||
|
|
||||||
|
async def test_bulk_metadata_refresh_skips_confirmed_not_found_models(
|
||||||
|
monkeypatch: pytest.MonkeyPatch,
|
||||||
|
) -> None:
|
||||||
|
"""Models marked as from_civitai=False and civitai_deleted=True should be skipped."""
|
||||||
|
scanner = MockScanner()
|
||||||
|
scanner._cache.raw_data = [
|
||||||
|
{
|
||||||
|
"file_path": "model1.safetensors",
|
||||||
|
"sha256": "hash1",
|
||||||
|
"from_civitai": False,
|
||||||
|
"civitai_deleted": True,
|
||||||
|
"model_name": "NotOnCivitAI",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"file_path": "model2.safetensors",
|
||||||
|
"sha256": "hash2",
|
||||||
|
"from_civitai": True,
|
||||||
|
"model_name": "OnCivitAI",
|
||||||
|
},
|
||||||
|
]
|
||||||
|
service = MockModelService(scanner)
|
||||||
|
metadata_sync = StubMetadataSync()
|
||||||
|
settings = StubSettings(enable_metadata_archive_db=False)
|
||||||
|
progress = ProgressCollector()
|
||||||
|
|
||||||
|
async def fake_hydrate(model_data: Dict[str, Any]) -> Dict[str, Any]:
|
||||||
|
# Preserve the original data (simulating no metadata file on disk)
|
||||||
|
return model_data
|
||||||
|
|
||||||
|
monkeypatch.setattr(MetadataManager, "hydrate_model_data", staticmethod(fake_hydrate))
|
||||||
|
|
||||||
|
use_case = BulkMetadataRefreshUseCase(
|
||||||
|
service=service,
|
||||||
|
metadata_sync=metadata_sync,
|
||||||
|
settings_service=settings,
|
||||||
|
logger=logging.getLogger("test"),
|
||||||
|
)
|
||||||
|
|
||||||
|
result = await use_case.execute_with_error_handling(progress_callback=progress)
|
||||||
|
|
||||||
|
assert result["success"] is True
|
||||||
|
# Only model2 should be processed (model1 is skipped)
|
||||||
|
assert result["processed"] == 1
|
||||||
|
assert result["updated"] == 1
|
||||||
|
assert len(metadata_sync.calls) == 1
|
||||||
|
assert metadata_sync.calls[0]["file_path"] == "model2.safetensors"
|
||||||
|
|
||||||
|
|
||||||
|
async def test_bulk_metadata_refresh_skips_when_archive_checked(
|
||||||
|
monkeypatch: pytest.MonkeyPatch,
|
||||||
|
) -> None:
|
||||||
|
"""Models with db_checked=True should be skipped even if archive DB is enabled."""
|
||||||
|
scanner = MockScanner()
|
||||||
|
scanner._cache.raw_data = [
|
||||||
|
{
|
||||||
|
"file_path": "model1.safetensors",
|
||||||
|
"sha256": "hash1",
|
||||||
|
"from_civitai": False,
|
||||||
|
"civitai_deleted": True,
|
||||||
|
"db_checked": True,
|
||||||
|
"model_name": "ArchiveChecked",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"file_path": "model2.safetensors",
|
||||||
|
"sha256": "hash2",
|
||||||
|
"from_civitai": False,
|
||||||
|
"civitai_deleted": True,
|
||||||
|
"db_checked": False,
|
||||||
|
"model_name": "ArchiveNotChecked",
|
||||||
|
},
|
||||||
|
]
|
||||||
|
service = MockModelService(scanner)
|
||||||
|
metadata_sync = StubMetadataSync()
|
||||||
|
settings = StubSettings(enable_metadata_archive_db=True)
|
||||||
|
progress = ProgressCollector()
|
||||||
|
|
||||||
|
async def fake_hydrate(model_data: Dict[str, Any]) -> Dict[str, Any]:
|
||||||
|
return model_data
|
||||||
|
|
||||||
|
monkeypatch.setattr(MetadataManager, "hydrate_model_data", staticmethod(fake_hydrate))
|
||||||
|
|
||||||
|
use_case = BulkMetadataRefreshUseCase(
|
||||||
|
service=service,
|
||||||
|
metadata_sync=metadata_sync,
|
||||||
|
settings_service=settings,
|
||||||
|
logger=logging.getLogger("test"),
|
||||||
|
)
|
||||||
|
|
||||||
|
result = await use_case.execute_with_error_handling(progress_callback=progress)
|
||||||
|
|
||||||
|
assert result["success"] is True
|
||||||
|
# Only model2 should be processed (model1 has db_checked=True)
|
||||||
|
assert result["processed"] == 1
|
||||||
|
assert result["updated"] == 1
|
||||||
|
assert len(metadata_sync.calls) == 1
|
||||||
|
assert metadata_sync.calls[0]["file_path"] == "model2.safetensors"
|
||||||
|
|
||||||
|
|
||||||
|
async def test_bulk_metadata_refresh_processes_never_fetched_models(
|
||||||
|
monkeypatch: pytest.MonkeyPatch,
|
||||||
|
) -> None:
|
||||||
|
"""Models that have never been fetched (from_civitai=None) should be processed."""
|
||||||
|
scanner = MockScanner()
|
||||||
|
scanner._cache.raw_data = [
|
||||||
|
{
|
||||||
|
"file_path": "model1.safetensors",
|
||||||
|
"sha256": "hash1",
|
||||||
|
"from_civitai": None,
|
||||||
|
"model_name": "NeverFetched",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"file_path": "model2.safetensors",
|
||||||
|
"sha256": "hash2",
|
||||||
|
"model_name": "NoFromCivitaiField",
|
||||||
|
},
|
||||||
|
]
|
||||||
|
service = MockModelService(scanner)
|
||||||
|
metadata_sync = StubMetadataSync()
|
||||||
|
settings = StubSettings(enable_metadata_archive_db=False)
|
||||||
|
progress = ProgressCollector()
|
||||||
|
|
||||||
|
async def fake_hydrate(model_data: Dict[str, Any]) -> Dict[str, Any]:
|
||||||
|
return model_data
|
||||||
|
|
||||||
|
monkeypatch.setattr(MetadataManager, "hydrate_model_data", staticmethod(fake_hydrate))
|
||||||
|
|
||||||
|
use_case = BulkMetadataRefreshUseCase(
|
||||||
|
service=service,
|
||||||
|
metadata_sync=metadata_sync,
|
||||||
|
settings_service=settings,
|
||||||
|
logger=logging.getLogger("test"),
|
||||||
|
)
|
||||||
|
|
||||||
|
result = await use_case.execute_with_error_handling(progress_callback=progress)
|
||||||
|
|
||||||
|
assert result["success"] is True
|
||||||
|
# Both models should be processed
|
||||||
|
assert result["processed"] == 2
|
||||||
|
assert result["updated"] == 2
|
||||||
|
assert len(metadata_sync.calls) == 2
|
||||||
|
|
||||||
|
|
||||||
async def test_download_model_use_case_raises_validation_error() -> None:
|
async def test_download_model_use_case_raises_validation_error() -> None:
|
||||||
coordinator = StubDownloadCoordinator(error="validation")
|
coordinator = StubDownloadCoordinator(error="validation")
|
||||||
use_case = DownloadModelUseCase(download_coordinator=coordinator)
|
use_case = DownloadModelUseCase(download_coordinator=coordinator)
|
||||||
|
|||||||
@@ -75,6 +75,31 @@ def test_get_file_extension_defaults_to_jpg() -> None:
|
|||||||
assert ext == ".jpg"
|
assert ext == ".jpg"
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_file_extension_from_media_type_hint_video() -> None:
|
||||||
|
"""Test that media_type_hint='video' returns .mp4 when other methods fail"""
|
||||||
|
ext = processor_module.ExampleImagesProcessor._get_file_extension_from_content_or_headers(
|
||||||
|
b"", {}, "https://c.genur.art/536be3c9-e506-4365-b078-bfbc5df9ceec", "video"
|
||||||
|
)
|
||||||
|
assert ext == ".mp4"
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_file_extension_from_media_type_hint_image() -> None:
|
||||||
|
"""Test that media_type_hint='image' falls back to .jpg"""
|
||||||
|
ext = processor_module.ExampleImagesProcessor._get_file_extension_from_content_or_headers(
|
||||||
|
b"", {}, "https://example.com/no-extension", "image"
|
||||||
|
)
|
||||||
|
assert ext == ".jpg"
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_file_extension_media_type_hint_low_priority() -> None:
|
||||||
|
"""Test that media_type_hint is only used as last resort (after URL extension)"""
|
||||||
|
# URL has extension, should use that instead of media_type_hint
|
||||||
|
ext = processor_module.ExampleImagesProcessor._get_file_extension_from_content_or_headers(
|
||||||
|
b"", {}, "https://example.com/video.mp4", "image"
|
||||||
|
)
|
||||||
|
assert ext == ".mp4"
|
||||||
|
|
||||||
|
|
||||||
class StubScanner:
|
class StubScanner:
|
||||||
def __init__(self, models: list[Dict[str, Any]]) -> None:
|
def __init__(self, models: list[Dict[str, Any]]) -> None:
|
||||||
self._cache = SimpleNamespace(raw_data=models)
|
self._cache = SimpleNamespace(raw_data=models)
|
||||||
|
|||||||
100
tests/utils/test_fingerprint_fallback.py
Normal file
100
tests/utils/test_fingerprint_fallback.py
Normal file
@@ -0,0 +1,100 @@
|
|||||||
|
"""Test for modelVersionId fallback in fingerprint calculation."""
|
||||||
|
import pytest
|
||||||
|
from py.utils.utils import calculate_recipe_fingerprint
|
||||||
|
|
||||||
|
|
||||||
|
def test_calculate_fingerprint_with_model_version_id_fallback():
|
||||||
|
"""Test that fingerprint uses modelVersionId when hash is empty, even when not deleted."""
|
||||||
|
loras = [
|
||||||
|
{
|
||||||
|
"hash": "",
|
||||||
|
"strength": 1.0,
|
||||||
|
"modelVersionId": 2639467,
|
||||||
|
"isDeleted": False,
|
||||||
|
"exclude": False
|
||||||
|
}
|
||||||
|
]
|
||||||
|
fingerprint = calculate_recipe_fingerprint(loras)
|
||||||
|
assert fingerprint == "2639467:1.0"
|
||||||
|
|
||||||
|
|
||||||
|
def test_calculate_fingerprint_with_multiple_model_version_ids():
|
||||||
|
"""Test fingerprint with multiple loras using modelVersionId fallback."""
|
||||||
|
loras = [
|
||||||
|
{
|
||||||
|
"hash": "",
|
||||||
|
"strength": 1.0,
|
||||||
|
"modelVersionId": 2639467,
|
||||||
|
"isDeleted": False,
|
||||||
|
"exclude": False
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"hash": "",
|
||||||
|
"strength": 0.8,
|
||||||
|
"modelVersionId": 1234567,
|
||||||
|
"isDeleted": False,
|
||||||
|
"exclude": False
|
||||||
|
}
|
||||||
|
]
|
||||||
|
fingerprint = calculate_recipe_fingerprint(loras)
|
||||||
|
assert fingerprint == "1234567:0.8|2639467:1.0"
|
||||||
|
|
||||||
|
|
||||||
|
def test_calculate_fingerprint_with_deleted_lora():
|
||||||
|
"""Test that deleted loras with modelVersionId are still included."""
|
||||||
|
loras = [
|
||||||
|
{
|
||||||
|
"hash": "",
|
||||||
|
"strength": 1.0,
|
||||||
|
"modelVersionId": 2639467,
|
||||||
|
"isDeleted": True,
|
||||||
|
"exclude": False
|
||||||
|
}
|
||||||
|
]
|
||||||
|
fingerprint = calculate_recipe_fingerprint(loras)
|
||||||
|
assert fingerprint == "2639467:1.0"
|
||||||
|
|
||||||
|
|
||||||
|
def test_calculate_fingerprint_with_excluded_lora():
|
||||||
|
"""Test that excluded loras are skipped even with modelVersionId."""
|
||||||
|
loras = [
|
||||||
|
{
|
||||||
|
"hash": "",
|
||||||
|
"strength": 1.0,
|
||||||
|
"modelVersionId": 2639467,
|
||||||
|
"isDeleted": False,
|
||||||
|
"exclude": True
|
||||||
|
}
|
||||||
|
]
|
||||||
|
fingerprint = calculate_recipe_fingerprint(loras)
|
||||||
|
assert fingerprint == ""
|
||||||
|
|
||||||
|
|
||||||
|
def test_calculate_fingerprint_prefers_hash_over_version_id():
|
||||||
|
"""Test that hash is used even when modelVersionId is present."""
|
||||||
|
loras = [
|
||||||
|
{
|
||||||
|
"hash": "abc123",
|
||||||
|
"strength": 1.0,
|
||||||
|
"modelVersionId": 2639467,
|
||||||
|
"isDeleted": False,
|
||||||
|
"exclude": False
|
||||||
|
}
|
||||||
|
]
|
||||||
|
fingerprint = calculate_recipe_fingerprint(loras)
|
||||||
|
assert fingerprint == "abc123:1.0"
|
||||||
|
|
||||||
|
|
||||||
|
def test_calculate_fingerprint_without_hash_or_version_id():
|
||||||
|
"""Test that loras without hash or modelVersionId are skipped."""
|
||||||
|
loras = [
|
||||||
|
{
|
||||||
|
"hash": "",
|
||||||
|
"strength": 1.0,
|
||||||
|
"modelVersionId": 0,
|
||||||
|
"isDeleted": False,
|
||||||
|
"exclude": False
|
||||||
|
}
|
||||||
|
]
|
||||||
|
fingerprint = calculate_recipe_fingerprint(loras)
|
||||||
|
assert fingerprint == ""
|
||||||
@@ -6,7 +6,8 @@ export default defineConfig({
|
|||||||
globals: true,
|
globals: true,
|
||||||
setupFiles: ['tests/frontend/setup.js'],
|
setupFiles: ['tests/frontend/setup.js'],
|
||||||
include: [
|
include: [
|
||||||
'tests/frontend/**/*.test.js'
|
'tests/frontend/**/*.test.js',
|
||||||
|
'tests/frontend/**/*.test.ts'
|
||||||
],
|
],
|
||||||
coverage: {
|
coverage: {
|
||||||
enabled: process.env.VITEST_COVERAGE === 'true',
|
enabled: process.env.VITEST_COVERAGE === 'true',
|
||||||
|
|||||||
1865
vue-widgets/package-lock.json
generated
1865
vue-widgets/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user