saliacoel commited on
Commit
cb6dcfe
·
verified ·
1 Parent(s): 91a5e43

Upload Salia_Load_Lora_Wan.py

Browse files
Files changed (1) hide show
  1. Salia_Load_Lora_Wan.py +218 -0
Salia_Load_Lora_Wan.py ADDED
@@ -0,0 +1,218 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import os
3
+ import shutil
4
+ import subprocess
5
+ import urllib.parse
6
+ import urllib.request
7
+
8
+ import comfy.sd
9
+ import comfy.utils
10
+ import folder_paths
11
+
12
+
13
# Base URL of the public Hugging Face repo hosting the paired LoRA files;
# a URL-quoted filename is appended to form a direct download link.
HF_REPO_BASE = "https://huggingface.co/saliacoel/x/resolve/main"
14
+
15
+
16
class salia_Load_Lora_Wan:
    """
    Downloads paired *_HI / *_LO LoRAs from the public Hugging Face repo
    `saliacoel/x` when missing, then applies them model-only to two separate
    MODEL inputs.

    Input examples that all resolve to the same pair:
      - Fade_to_Black
      - Fade_to_Black_HI
      - Fade_to_Black_LO
      - Fade_to_Black_HI.safetensors
    """

    CATEGORY = "loaders/saliacoel"
    FUNCTION = "load_dual_loras"
    RETURN_TYPES = ("MODEL", "MODEL")
    RETURN_NAMES = ("loaded_out_HI", "loaded_out_LO")
    DESCRIPTION = (
        "Downloads missing *_HI/*_LO LoRAs from saliacoel/x and applies them "
        "with model-only LoRA loading to separate HI and LO model inputs."
    )
    OUTPUT_TOOLTIPS = (
        "model_in_HI with the *_HI LoRA applied.",
        "model_in_LO with the *_LO LoRA applied.",
    )

    def __init__(self) -> None:
        # Per-instance (path, state_dict) caches of the last-loaded HI/LO
        # LoRAs so re-running the same workflow skips the disk read.
        self.loaded_lora_hi = None
        self.loaded_lora_lo = None

    @classmethod
    def INPUT_TYPES(cls):
        """Describe the node's inputs for the ComfyUI frontend."""
        return {
            "required": {
                "filename": (
                    "STRING",
                    {
                        "default": "",
                        "multiline": False,
                        "placeholder": "Fade_to_Black or Fade_to_Black_HI",
                        "tooltip": (
                            "Base LoRA name. A trailing _HI, _LO, or .safetensors "
                            "is accepted and normalized automatically."
                        ),
                    },
                ),
                "model_in_HI": (
                    "MODEL",
                    {"tooltip": "The MODEL input that will receive the *_HI LoRA."},
                ),
                "model_in_LO": (
                    "MODEL",
                    {"tooltip": "The MODEL input that will receive the *_LO LoRA."},
                ),
                "strength_HI": (
                    "FLOAT",
                    {
                        "default": 1.0,
                        "min": -100.0,
                        "max": 100.0,
                        "step": 0.01,
                        "tooltip": "Strength used when applying the *_HI LoRA.",
                    },
                ),
                "strength_LO": (
                    "FLOAT",
                    {
                        "default": 1.0,
                        "min": -100.0,
                        "max": 100.0,
                        "step": 0.01,
                        "tooltip": "Strength used when applying the *_LO LoRA.",
                    },
                ),
            }
        }

    @staticmethod
    def _normalize_base_name(name: str) -> str:
        """Reduce *name* to its bare base, stripping any directory part,
        a `.safetensors` extension, and a trailing `_HI`/`_LO` suffix.

        Raises:
            ValueError: if the input is empty or normalizes to nothing.
        """
        base = (name or "").strip()
        if not base:
            raise ValueError("filename cannot be empty.")

        # Drop any directory component to avoid path traversal or accidental nesting.
        base = os.path.basename(base)

        if base.lower().endswith(".safetensors"):
            base = base[: -len(".safetensors")]

        # Suffix match is deliberately case-sensitive: repo filenames use
        # uppercase _HI/_LO exactly.
        if base.endswith("_HI"):
            base = base[: -len("_HI")]
        elif base.endswith("_LO"):
            base = base[: -len("_LO")]

        if not base:
            raise ValueError("filename resolves to an empty base name.")

        return base

    @classmethod
    def _build_pair_names(cls, name: str) -> tuple[str, str]:
        """Return the (`<base>_HI.safetensors`, `<base>_LO.safetensors`) pair."""
        base = cls._normalize_base_name(name)
        return f"{base}_HI.safetensors", f"{base}_LO.safetensors"

    @staticmethod
    def _download_file(url: str, target_path: str) -> None:
        """Download *url* to *target_path* atomically via a `.download` temp file.

        Uses `wget` when present (shows its progress output), otherwise falls
        back to urllib. The file is only renamed into place on success, so an
        interrupted download never leaves a truncated .safetensors behind.

        Raises:
            subprocess.CalledProcessError: if wget exits non-zero.
            urllib.error.URLError: on urllib download failure.
        """
        os.makedirs(os.path.dirname(target_path), exist_ok=True)
        tmp_path = target_path + ".download"

        # Remove any stale partial file from a previous failed attempt.
        if os.path.exists(tmp_path):
            os.remove(tmp_path)

        wget_path = shutil.which("wget")

        try:
            if wget_path:
                # No cwd override: tmp_path may be a relative path, and
                # changing the working directory would make wget write
                # somewhere other than where os.replace() below looks.
                subprocess.run([wget_path, "-O", tmp_path, url], check=True)
            else:
                request = urllib.request.Request(
                    url,
                    headers={"User-Agent": "ComfyUI-SaliacoelDualRepoLoraModelOnly/1.0"},
                )
                # The timeout is per socket operation, so a slow but flowing
                # transfer of a large file is not cut off — only a stall is.
                with urllib.request.urlopen(request, timeout=30) as response, open(tmp_path, "wb") as out_file:
                    shutil.copyfileobj(response, out_file)

            # Atomic rename: readers never observe a half-written file.
            os.replace(tmp_path, target_path)
        except Exception:
            # Best-effort cleanup; never let a cleanup failure mask the
            # original download error.
            try:
                if os.path.exists(tmp_path):
                    os.remove(tmp_path)
            except OSError:
                pass
            raise

    @classmethod
    def _ensure_lora_available(cls, lora_name: str) -> str:
        """Return a local path for *lora_name*, downloading it if missing.

        Looks the name up in ComfyUI's configured 'loras' folders first;
        on a miss, downloads it from HF_REPO_BASE into the first folder.

        Raises:
            RuntimeError: if no 'loras' folder is configured.
        """
        existing_path = folder_paths.get_full_path("loras", lora_name)
        if existing_path is not None:
            return existing_path

        lora_dirs = folder_paths.get_folder_paths("loras")
        if not lora_dirs:
            raise RuntimeError("No ComfyUI 'loras' folder is configured.")

        target_dir = lora_dirs[0]
        target_path = os.path.join(target_dir, lora_name)
        # Quote the filename so spaces/specials form a valid URL path segment.
        url = f"{HF_REPO_BASE}/{urllib.parse.quote(lora_name)}"

        logging.info("[SaliacoelDualRepoLoraModelOnly] Downloading missing LoRA: %s", url)
        cls._download_file(url, target_path)

        # Re-resolve through ComfyUI's folder lookup so the returned path matches
        # normal Comfy conventions if multiple LoRA directories are configured.
        resolved_path = folder_paths.get_full_path("loras", lora_name)
        return resolved_path if resolved_path is not None else target_path

    def _get_or_load_lora(self, cache_attr: str, lora_path: str):
        """Return the LoRA state dict at *lora_path*, using the per-instance
        cache attribute *cache_attr* ('loaded_lora_hi'/'loaded_lora_lo')."""
        cached = getattr(self, cache_attr)
        if cached is not None and cached[0] == lora_path:
            return cached[1]

        lora = comfy.utils.load_torch_file(lora_path, safe_load=True)
        setattr(self, cache_attr, (lora_path, lora))
        return lora

    def _apply_lora_model_only(self, model, lora_path: str, strength: float, cache_attr: str):
        """Apply the LoRA at *lora_path* to *model* (model weights only,
        CLIP untouched) and return the patched model.

        A strength of 0 is a no-op shortcut, matching ComfyUI's stock
        LoraLoader behavior.
        """
        if strength == 0:
            return model

        lora = self._get_or_load_lora(cache_attr, lora_path)
        # clip=None + clip_strength=0 → model-only application.
        model_lora, _ = comfy.sd.load_lora_for_models(model, None, lora, strength, 0)
        return model_lora

    def load_dual_loras(self, filename, model_in_HI, model_in_LO, strength_HI, strength_LO):
        """Node entry point: resolve the HI/LO pair for *filename*, download
        whichever is missing, and return the two patched models."""
        hi_name, lo_name = self._build_pair_names(filename)

        hi_path = self._ensure_lora_available(hi_name)
        lo_path = self._ensure_lora_available(lo_name)

        loaded_out_HI = self._apply_lora_model_only(
            model=model_in_HI,
            lora_path=hi_path,
            strength=strength_HI,
            cache_attr="loaded_lora_hi",
        )
        loaded_out_LO = self._apply_lora_model_only(
            model=model_in_LO,
            lora_path=lo_path,
            strength=strength_LO,
            cache_attr="loaded_lora_lo",
        )

        return (loaded_out_HI, loaded_out_LO)
210
+
211
+
212
# ComfyUI registration tables: NODE_CLASS_MAPPINGS exposes the node class
# under its internal id; NODE_DISPLAY_NAME_MAPPINGS sets the UI label.
NODE_CLASS_MAPPINGS = {
    "salia_Load_Lora_Wan": salia_Load_Lora_Wan,
}

NODE_DISPLAY_NAME_MAPPINGS = {
    "salia_Load_Lora_Wan": "Salia Load Dual LoRA Loader (Model Only)",
}