diff --git a/py/nodes/utils.py b/py/nodes/utils.py
index 4be6e9a3..5ea94265 100644
--- a/py/nodes/utils.py
+++ b/py/nodes/utils.py
@@ -110,10 +110,14 @@ def nunchaku_load_lora(model, lora_name, lora_strength):
         model_wrapper.model = transformer
         ret_model_wrapper.model = transformer
 
-    # Get full path to the LoRA file
-    lora_path = folder_paths.get_full_path("loras", lora_name)
+    # Get full path to the LoRA file. Allow both direct paths and registered LoRA names.
+    lora_path = lora_name if os.path.isfile(lora_name) else folder_paths.get_full_path("loras", lora_name)
+    if not lora_path or not os.path.isfile(lora_path):
+        logger.warning("Skipping LoRA '%s' because it could not be found", lora_name)
+        return model
+
     ret_model_wrapper.loras.append((lora_path, lora_strength))
-
+
     # Convert the LoRA to diffusers format
     sd = to_diffusers(lora_path)
 
diff --git a/tests/nodes/test_utils.py b/tests/nodes/test_utils.py
new file mode 100644
index 00000000..81efd34f
--- /dev/null
+++ b/tests/nodes/test_utils.py
@@ -0,0 +1,44 @@
+import logging
+
+from py.nodes.utils import nunchaku_load_lora
+
+
+class _DummyTransformer:
+    pass
+
+
+class _DummyModelConfig:
+    def __init__(self):
+        self.unet_config = {"in_channels": 4}
+
+
+class _DummyDiffusionModel:
+    def __init__(self):
+        self.model = _DummyTransformer()
+        self.loras = []
+
+
+class _DummyModelWrapper:
+    def __init__(self):
+        self.diffusion_model = _DummyDiffusionModel()
+        self.model_config = _DummyModelConfig()
+
+
+class _DummyModel:
+    def __init__(self):
+        self.model = _DummyModelWrapper()
+
+
+def test_nunchaku_load_lora_skips_missing_lora(monkeypatch, caplog):
+    import folder_paths
+
+    dummy_model = _DummyModel()
+
+    monkeypatch.setattr(folder_paths, "get_full_path", lambda *_args, **_kwargs: None, raising=False)
+
+    with caplog.at_level(logging.WARNING):
+        result_model = nunchaku_load_lora(dummy_model, "missing_lora", 0.5)
+
+    assert result_model is dummy_model
+    assert dummy_model.model.diffusion_model.loras == []
+    assert "missing_lora" in caplog.text