# MyCustomNodes / Salia_Load_Lora_Wan.py
# Source: Hugging Face upload by saliacoel ("Upload 2 files", commit 5d2a0b7, verified).
# NOTE(review): the lines above were file-page scrape residue, not Python; kept here as a comment header so the module parses.
import logging
import os
import shutil
import subprocess
import urllib.parse
import urllib.request
import comfy.sd
import comfy.utils
import folder_paths
HF_REPO_BASE = "https://huggingface.co/saliacoel/x/resolve/main"
class Salia_Load_Lora_Wan:
    """
    Downloads paired *_HI / *_LO LoRAs from the public Hugging Face repo
    `saliacoel/x` when missing, then applies them model-only to two separate
    MODEL inputs.

    Input examples that all resolve to the same pair:
    - Fade_to_Black
    - Fade_to_Black_HI
    - Fade_to_Black_LO
    - Fade_to_Black_HI.safetensors
    """

    CATEGORY = "loaders/saliacoel"
    FUNCTION = "load_dual_loras"
    RETURN_TYPES = ("MODEL", "MODEL")
    RETURN_NAMES = ("loaded_out_HI", "loaded_out_LO")
    DESCRIPTION = (
        "Downloads missing *_HI/*_LO LoRAs from saliacoel/x and applies them "
        "with model-only LoRA loading to separate HI and LO model inputs."
    )
    OUTPUT_TOOLTIPS = (
        "model_in_HI with the *_HI LoRA applied.",
        "model_in_LO with the *_LO LoRA applied.",
    )

    # Per-read socket timeout (seconds) for the urllib fallback download.
    # It applies to each blocking read, not the whole transfer, so large
    # files still download fine as long as bytes keep flowing; it only
    # prevents a dead connection from hanging ComfyUI forever.
    DOWNLOAD_TIMEOUT: float = 60.0

    def __init__(self):
        # (path, state_dict) tuples caching the most recently loaded HI/LO
        # LoRA files so re-runs with the same name skip the disk read.
        self.loaded_lora_hi = None
        self.loaded_lora_lo = None

    @classmethod
    def INPUT_TYPES(cls):
        """Declare the node's inputs for the ComfyUI frontend."""
        return {
            "required": {
                "filename": (
                    "STRING",
                    {
                        "default": "",
                        "multiline": False,
                        "placeholder": "Fade_to_Black or Fade_to_Black_HI",
                        "tooltip": (
                            "Base LoRA name. A trailing _HI, _LO, or .safetensors "
                            "is accepted and normalized automatically."
                        ),
                    },
                ),
                "model_in_HI": (
                    "MODEL",
                    {"tooltip": "The MODEL input that will receive the *_HI LoRA."},
                ),
                "model_in_LO": (
                    "MODEL",
                    {"tooltip": "The MODEL input that will receive the *_LO LoRA."},
                ),
                "strength_HI": (
                    "FLOAT",
                    {
                        "default": 1.0,
                        "min": -100.0,
                        "max": 100.0,
                        "step": 0.01,
                        "tooltip": "Strength used when applying the *_HI LoRA.",
                    },
                ),
                "strength_LO": (
                    "FLOAT",
                    {
                        "default": 1.0,
                        "min": -100.0,
                        "max": 100.0,
                        "step": 0.01,
                        "tooltip": "Strength used when applying the *_LO LoRA.",
                    },
                ),
            }
        }

    @staticmethod
    def _normalize_base_name(name: str) -> str:
        """Reduce user input to the bare pair base name.

        Strips any directory component, a trailing ``.safetensors``
        extension, and a trailing ``_HI``/``_LO`` marker.

        Raises:
            ValueError: if the input, or what remains after stripping,
                is empty.
        """
        base = (name or "").strip()
        if not base:
            raise ValueError("filename cannot be empty.")
        # Drop any directory component to avoid path traversal or accidental nesting.
        base = os.path.basename(base)
        if base.lower().endswith(".safetensors"):
            base = base[: -len(".safetensors")]
        # Accept the pair markers case-insensitively (e.g. "_hi"), matching
        # the case-insensitive extension handling above; previously a
        # lowercase marker leaked into the base and produced names like
        # "name_hi_HI.safetensors".
        if base[-3:].upper() in ("_HI", "_LO"):
            base = base[:-3]
        if not base:
            raise ValueError("filename resolves to an empty base name.")
        return base

    @classmethod
    def _build_pair_names(cls, name: str) -> tuple[str, str]:
        """Return the (HI, LO) .safetensors filenames for *name*."""
        base = cls._normalize_base_name(name)
        return f"{base}_HI.safetensors", f"{base}_LO.safetensors"

    @classmethod
    def _download_file(cls, url: str, target_path: str) -> None:
        """Download *url* to *target_path* atomically.

        Writes to a ``.download`` temp file first and promotes it with
        ``os.replace`` only on success, so an interrupted transfer never
        leaves a truncated file under the final name. Prefers ``wget``
        when available (resumable, progress output), falling back to
        urllib.

        Raises:
            subprocess.CalledProcessError: if wget exits non-zero.
            urllib.error.URLError: if the urllib fallback fails.
        """
        os.makedirs(os.path.dirname(target_path), exist_ok=True)
        tmp_path = target_path + ".download"
        if os.path.exists(tmp_path):
            os.remove(tmp_path)
        wget_path = shutil.which("wget")
        try:
            if wget_path:
                subprocess.run(
                    [wget_path, "-O", tmp_path, url],
                    check=True,
                    cwd=os.path.dirname(target_path),
                )
            else:
                request = urllib.request.Request(
                    url,
                    headers={"User-Agent": "ComfyUI-SaliacoelDualRepoLoraModelOnly/1.0"},
                )
                # Timeout guards against a dead connection hanging the
                # whole ComfyUI prompt queue.
                with urllib.request.urlopen(request, timeout=cls.DOWNLOAD_TIMEOUT) as response, open(
                    tmp_path, "wb"
                ) as out_file:
                    shutil.copyfileobj(response, out_file)
            os.replace(tmp_path, target_path)
        except Exception:
            # Best-effort cleanup: never let a removal failure mask the
            # original download error.
            try:
                os.remove(tmp_path)
            except OSError:
                pass
            raise

    @classmethod
    def _ensure_lora_available(cls, lora_name: str) -> str:
        """Return a local path for *lora_name*, downloading it if missing.

        Raises:
            RuntimeError: if no 'loras' folder is configured in ComfyUI.
        """
        existing_path = folder_paths.get_full_path("loras", lora_name)
        if existing_path is not None:
            return existing_path
        lora_dirs = folder_paths.get_folder_paths("loras")
        if not lora_dirs:
            raise RuntimeError("No ComfyUI 'loras' folder is configured.")
        # Download into the first configured LoRA directory.
        target_dir = lora_dirs[0]
        target_path = os.path.join(target_dir, lora_name)
        url = f"{HF_REPO_BASE}/{urllib.parse.quote(lora_name)}"
        logging.info("[SaliacoelDualRepoLoraModelOnly] Downloading missing LoRA: %s", url)
        cls._download_file(url, target_path)
        # Re-resolve through ComfyUI's folder lookup so the returned path matches
        # normal Comfy conventions if multiple LoRA directories are configured.
        resolved_path = folder_paths.get_full_path("loras", lora_name)
        return resolved_path if resolved_path is not None else target_path

    def _get_or_load_lora(self, cache_attr: str, lora_path: str):
        """Load the LoRA state dict at *lora_path*, reusing the per-slot cache.

        The cache (`loaded_lora_hi` / `loaded_lora_lo`) holds a
        (path, state_dict) tuple and is only reused when the path matches.
        """
        cached = getattr(self, cache_attr)
        if cached is not None and cached[0] == lora_path:
            return cached[1]
        lora = comfy.utils.load_torch_file(lora_path, safe_load=True)
        setattr(self, cache_attr, (lora_path, lora))
        return lora

    def _apply_lora_model_only(self, model, lora_path: str, strength: float, cache_attr: str):
        """Apply the LoRA at *lora_path* to *model* (model weights only).

        A zero strength is a no-op and skips loading entirely; CLIP
        strength is pinned to 0 so only the MODEL is patched.
        """
        if strength == 0:
            return model
        lora = self._get_or_load_lora(cache_attr, lora_path)
        model_lora, _ = comfy.sd.load_lora_for_models(model, None, lora, strength, 0)
        return model_lora

    def load_dual_loras(self, filename, model_in_HI, model_in_LO, strength_HI, strength_LO):
        """Node entry point: resolve, fetch, and apply the *_HI/*_LO pair.

        Returns:
            (loaded_out_HI, loaded_out_LO): the two input models with their
            respective LoRAs applied model-only.
        """
        hi_name, lo_name = self._build_pair_names(filename)
        hi_path = self._ensure_lora_available(hi_name)
        lo_path = self._ensure_lora_available(lo_name)
        loaded_out_HI = self._apply_lora_model_only(
            model=model_in_HI,
            lora_path=hi_path,
            strength=strength_HI,
            cache_attr="loaded_lora_hi",
        )
        loaded_out_LO = self._apply_lora_model_only(
            model=model_in_LO,
            lora_path=lo_path,
            strength=strength_LO,
            cache_attr="loaded_lora_lo",
        )
        return (loaded_out_HI, loaded_out_LO)
# Registration tables read by ComfyUI when it imports this module: the first
# maps the node's internal id to its implementing class, the second maps the
# same id to the label shown in the node picker.
NODE_CLASS_MAPPINGS = {"Salia_Load_Lora_Wan": Salia_Load_Lora_Wan}

NODE_DISPLAY_NAME_MAPPINGS = {"Salia_Load_Lora_Wan": "Salia Dual LoRA Loader (Model Only)"}