feat(nodes): enhance LoRA loading with path support and add tests

- Allow direct file paths in addition to registered LoRA names
- Add graceful handling for missing LoRA files with warning logs
- Add comprehensive unit tests for missing LoRA file handling
- Ensure backward compatibility with existing LoRA loading behavior
This commit is contained in:
Will Miao
2025-10-29 22:39:08 +08:00
parent 39586f4a20
commit b99d78bda6
2 changed files with 51 additions and 3 deletions

44
tests/nodes/test_utils.py Normal file
View File

@@ -0,0 +1,44 @@
import logging
from py.nodes.utils import nunchaku_load_lora
class _DummyTransformer:
pass
class _DummyModelConfig:
def __init__(self):
self.unet_config = {"in_channels": 4}
class _DummyDiffusionModel:
    """Diffusion-model stub: holds a transformer and the list of applied LoRAs."""

    def __init__(self):
        # Inner transformer placeholder.
        self.model = _DummyTransformer()
        # No LoRAs applied at construction time.
        self.loras = list()
class _DummyModelWrapper:
    """Wrapper stub pairing a diffusion model with its model config."""

    def __init__(self):
        # Mirror the attribute layout the loader traverses.
        self.diffusion_model = _DummyDiffusionModel()
        self.model_config = _DummyModelConfig()
class _DummyModel:
    """Top-level model stub whose ``model`` attribute is the wrapped stack."""

    def __init__(self):
        # One level of nesting, matching what nunchaku_load_lora expects.
        self.model = _DummyModelWrapper()
def test_nunchaku_load_lora_skips_missing_lora(monkeypatch, caplog):
    """A LoRA name that resolves to no file is skipped with a warning.

    The model must come back unchanged: same object, no LoRA recorded.
    """
    import folder_paths

    model = _DummyModel()

    # Simulate an unresolvable LoRA: the path lookup always yields None.
    monkeypatch.setattr(
        folder_paths, "get_full_path", lambda *_a, **_kw: None, raising=False
    )

    with caplog.at_level(logging.WARNING):
        returned = nunchaku_load_lora(model, "missing_lora", 0.5)

    # Same object is returned, nothing was appended, and the warning
    # message names the missing LoRA.
    assert returned is model
    assert model.model.diffusion_model.loras == []
    assert "missing_lora" in caplog.text