mirror of
https://github.com/willmiao/ComfyUI-Lora-Manager.git
synced 2026-03-26 23:48:52 -03:00
feat: add configurable mature blur threshold setting
Add new setting 'mature_blur_level' with options PG13/R/X/XXX to control which NSFW rating level triggers blur filtering when NSFW blur is enabled.

- Backend: update preview selection logic to respect threshold
- Frontend: update UI components to use configurable threshold
- Settings: add validation and normalization for mature_blur_level
- Tests: add coverage for new threshold behavior
- Translations: add keys for all supported languages

Fixes #867
This commit is contained in:
@@ -15,7 +15,8 @@ describe('state module', () => {
|
||||
expect(defaultSettings).toMatchObject({
|
||||
civitai_api_key: '',
|
||||
language: 'en',
|
||||
blur_mature_content: true
|
||||
blur_mature_content: true,
|
||||
mature_blur_level: 'R'
|
||||
});
|
||||
|
||||
expect(defaultSettings.download_path_templates).toEqual(DEFAULT_PATH_TEMPLATES);
|
||||
|
||||
18
tests/frontend/utils/constants.matureBlurThreshold.test.js
Normal file
18
tests/frontend/utils/constants.matureBlurThreshold.test.js
Normal file
@@ -0,0 +1,18 @@
|
||||
import { describe, expect, it } from 'vitest';

import { NSFW_LEVELS, getMatureBlurThreshold } from '../../../static/js/utils/constants.js';

// Unit tests for the mature-blur threshold resolver: explicit values map to
// NSFW levels, casing is normalized, and anything unrecognized falls back to R.
describe('getMatureBlurThreshold', () => {
  it('returns configured PG13 threshold', () => {
    const settings = { mature_blur_level: 'PG13' };
    expect(getMatureBlurThreshold(settings)).toBe(NSFW_LEVELS.PG13);
  });

  it('normalizes lowercase values', () => {
    // Lowercase input should resolve the same as its uppercase counterpart.
    expect(getMatureBlurThreshold({ mature_blur_level: 'x' })).toBe(NSFW_LEVELS.X);
  });

  it('falls back to R when value is invalid or missing', () => {
    const invalid = getMatureBlurThreshold({ mature_blur_level: 'invalid' });
    const missing = getMatureBlurThreshold({});
    expect(invalid).toBe(NSFW_LEVELS.R);
    expect(missing).toBe(NSFW_LEVELS.R);
  });
});
|
||||
@@ -265,6 +265,32 @@ def test_delete_setting(manager):
|
||||
assert manager.get("example") is None
|
||||
|
||||
|
||||
def test_missing_mature_blur_level_defaults_to_r(tmp_path, monkeypatch):
    """A settings file without 'mature_blur_level' resolves to the 'R' default."""
    # The key is intentionally absent from the stored settings.
    stored_settings = {
        "blur_mature_content": True,
        "folder_paths": {},
    }
    manager = _create_manager_with_settings(tmp_path, monkeypatch, stored_settings)

    assert manager.get("mature_blur_level") == "R"
|
||||
|
||||
|
||||
def test_invalid_mature_blur_level_is_normalized_to_r(tmp_path, monkeypatch):
    """An unrecognized 'mature_blur_level' value is normalized back to 'R'."""
    # "unsafe" is not one of the supported PG13/R/X/XXX values.
    stored_settings = {
        "mature_blur_level": "unsafe",
        "folder_paths": {},
    }
    manager = _create_manager_with_settings(tmp_path, monkeypatch, stored_settings)

    assert manager.get("mature_blur_level") == "R"
|
||||
|
||||
|
||||
def test_model_name_display_setting_notifies_scanners(tmp_path, monkeypatch):
|
||||
initial = {
|
||||
"libraries": {"default": {"folder_paths": {}, "default_lora_root": "", "default_checkpoint_root": "", "default_embedding_root": ""}},
|
||||
|
||||
@@ -1,30 +1,7 @@
|
||||
from py.utils.preview_selection import select_preview_media
|
||||
import pytest
|
||||
|
||||
|
||||
def test_select_preview_prefers_safe_media_when_blurred():
    """With blur enabled, selection favors the lowest-rated (safest) media."""
    # Candidates listed from most to least explicit on purpose.
    images = [
        {"url": url, "type": "image", "nsfwLevel": rating}
        for url, rating in (("nsfw", 8), ("mid", 4), ("safe", 1))
    ]

    selected, level = select_preview_media(images, blur_mature_content=True)

    assert (selected["url"], level) == ("safe", 1)
|
||||
|
||||
|
||||
def test_select_preview_returns_lowest_when_no_safe_media():
    """When every candidate is mature, the lowest NSFW level still wins."""
    # No item is rated "safe"; the middle rating (4) is the best available.
    images = [
        {"url": url, "type": "image", "nsfwLevel": rating}
        for url, rating in (("x", 16), ("r", 4), ("xx", 8))
    ]

    selected, level = select_preview_media(images, blur_mature_content=True)

    assert (selected["url"], level) == ("r", 4)
|
||||
from py.utils.constants import NSFW_LEVELS
|
||||
from py.utils.preview_selection import resolve_mature_threshold, select_preview_media
|
||||
|
||||
|
||||
def test_select_preview_returns_first_when_blur_disabled():
|
||||
@@ -37,3 +14,36 @@ def test_select_preview_returns_first_when_blur_disabled():
|
||||
|
||||
assert selected["url"] == "nsfw"
|
||||
assert level == 32
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
    ("threshold_name", "expected_url"),
    [
        ("PG13", "pg"),
        ("R", "pg13"),
        ("X", "r"),
        ("XXX", "x"),
    ],
)
def test_select_preview_respects_configurable_threshold(threshold_name, expected_url):
    """Each threshold picks the most explicit media that still passes it."""
    # Map preserves insertion order: most explicit first, safest last.
    levels_by_url = {
        "xxx": NSFW_LEVELS["XXX"],
        "x": NSFW_LEVELS["X"],
        "r": NSFW_LEVELS["R"],
        "pg13": NSFW_LEVELS["PG13"],
        "pg": NSFW_LEVELS["PG"],
    }
    images = [
        {"url": url, "type": "image", "nsfwLevel": rating}
        for url, rating in levels_by_url.items()
    ]

    selected, level = select_preview_media(
        images,
        blur_mature_content=True,
        mature_threshold=NSFW_LEVELS[threshold_name],
    )

    assert selected["url"] == expected_url
    assert level == levels_by_url[expected_url]
|
||||
|
||||
|
||||
def test_resolve_mature_threshold_falls_back_to_r_for_invalid_value():
    """Unknown or missing 'mature_blur_level' values resolve to the R level."""
    for settings in ({"mature_blur_level": "invalid"}, {}):
        assert resolve_mature_threshold(settings) == NSFW_LEVELS["R"]
|
||||
|
||||
Reference in New Issue
Block a user