saliacoel committed on
Commit
ca580d9
·
verified ·
1 Parent(s): 31b2025

Upload Salia_Upload_TMP.py

Browse files
Files changed (1) hide show
  1. Salia_Upload_TMP.py +295 -0
Salia_Upload_TMP.py ADDED
@@ -0,0 +1,295 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import io
2
+ import os
3
+ import re
4
+ import zipfile
5
+ from urllib.parse import quote
6
+
7
+ import numpy as np
8
+
9
# Target Hugging Face Hub repository for every upload made by this module.
REPO_ID = "saliacoel/tmp"
REPO_TYPE = "model"
BRANCH = "main"
# Menu category under which these nodes appear in the ComfyUI node browser.
CATEGORY = "Salia/HuggingFace"

# Characters never allowed in an uploaded filename: Windows-reserved
# punctuation, backslash, and ASCII control characters. Matches are
# replaced with "_" by _sanitize_filename.
_INVALID_FILENAME_CHARS = re.compile(r'[<>:"\\|?*\x00-\x1F]')
15
+
16
+
17
+ def _require_non_empty_string(value, name):
18
+ if value is None:
19
+ raise ValueError(f"{name} is required.")
20
+ value = str(value).strip()
21
+ if not value:
22
+ raise ValueError(f"{name} must not be empty.")
23
+ return value
24
+
25
+
26
def _sanitize_filename(filename: str) -> str:
    """Reduce *filename* to a safe bare name: no directories, no odd characters.

    Raises ValueError when the input is missing or nothing survives cleaning.
    """
    cleaned = _require_non_empty_string(filename, "filename")
    # Keep only the last path component; tolerate both separator styles.
    cleaned = cleaned.replace("\\", "/").rsplit("/", 1)[-1]
    # Replace reserved/control characters, then drop edge whitespace and dots.
    cleaned = _INVALID_FILENAME_CHARS.sub("_", cleaned)
    cleaned = cleaned.strip().strip(".")
    if not cleaned:
        raise ValueError("filename became empty after sanitization.")
    return cleaned
34
+
35
+
36
def _strip_known_suffixes(filename: str, suffixes) -> str:
    """Sanitize *filename* and remove the first case-insensitively matching suffix."""
    base = _sanitize_filename(filename)
    folded = base.lower()
    matched = next((s for s in suffixes if folded.endswith(s.lower())), None)
    if matched is None:
        return base
    return base[: len(base) - len(matched)]
43
+
44
+
45
def _ensure_extension(filename: str, extension: str) -> str:
    """Return the sanitized *filename*, guaranteed to end with *extension*.

    The check is case-insensitive, so "photo.PNG" already satisfies ".png"
    and is returned unchanged.
    """
    filename = _sanitize_filename(filename)
    if filename.lower().endswith(extension.lower()):
        return filename
    # BUG FIX: previously returned f"(unknown){extension}", discarding the
    # user-supplied name entirely; append the extension to the real name.
    return f"{filename}{extension}"
50
+
51
+
52
+ def _quote_repo_path(path_in_repo: str) -> str:
53
+ return "/".join(quote(part, safe="") for part in path_in_repo.split("/"))
54
+
55
+
56
def _repo_file_url(path_in_repo: str) -> str:
    """Build the public "resolve" (direct download) URL for a repo file."""
    encoded = _quote_repo_path(path_in_repo)
    return f"https://huggingface.co/{REPO_ID}/resolve/{BRANCH}/{encoded}"
59
+
60
+
61
def _import_hf_api():
    """Lazily import huggingface_hub and hand back its HfApi class.

    Deferring the import keeps node registration working even when the
    dependency is absent; a clear install hint is raised on first use.
    """
    try:
        from huggingface_hub import HfApi
    except Exception as exc:
        message = (
            "Missing dependency 'huggingface_hub'. Install it in the ComfyUI "
            "Python environment with: pip install huggingface_hub"
        )
        raise RuntimeError(message) from exc
    else:
        return HfApi
69
+
70
+
71
def _upload_bytes(file_bytes: bytes, path_in_repo: str, hf_token: str, commit_message: str):
    """Upload raw bytes to the fixed Hub repo and describe the result.

    Returns a (path_in_repo, file_url, commit_url) tuple. Raises ValueError
    on a missing token/path and RuntimeError when huggingface_hub is absent.
    """
    hf_token = _require_non_empty_string(hf_token, "hf_token")
    path_in_repo = _sanitize_filename(path_in_repo)

    api = _import_hf_api()(token=hf_token)
    commit_info = api.upload_file(
        path_or_fileobj=file_bytes,
        path_in_repo=path_in_repo,
        repo_id=REPO_ID,
        repo_type=REPO_TYPE,
        token=hf_token,
        commit_message=commit_message,
    )

    # Some huggingface_hub return values lack commit_url; fall back to repr.
    commit_url = getattr(commit_info, "commit_url", None) or str(commit_info)
    return path_in_repo, _repo_file_url(path_in_repo), commit_url
92
+
93
+
94
def _tensor_to_png_bytes(img):
    """Encode a single image tensor/array as PNG.

    Accepts HWC data, or a 1xHWC batch, with 3 (RGB) or 4 (RGBA) channels.
    Float pixels are assumed to be in [0, 1]; integer pixels are clamped to
    [0, 255]. Returns a (png_bytes, mode) tuple where mode is "RGB"/"RGBA".
    """
    try:
        from PIL import Image
    except Exception as exc:
        raise RuntimeError(
            "Missing dependency 'Pillow'. Install it in the ComfyUI Python environment with: pip install pillow"
        ) from exc

    if img is None:
        raise ValueError("img is required.")

    # Duck-typed torch handling: move to CPU numpy without importing torch.
    if hasattr(img, "detach"):
        img = img.detach()
    if hasattr(img, "cpu"):
        img = img.cpu()
    arr = img.numpy() if hasattr(img, "numpy") else np.asarray(img)

    # Unwrap the batch axis, but only for single-image batches.
    if arr.ndim == 4:
        batch = arr.shape[0]
        if batch == 0:
            raise ValueError("Received an empty image batch.")
        if batch != 1:
            raise ValueError(
                f"Expected a single image, but received a batch of {batch}. Use an image selector/split node first."
            )
        arr = arr[0]

    if arr.ndim != 3:
        raise ValueError(f"Expected image tensor with 3 dimensions after batch removal, got shape {arr.shape}.")

    channels = int(arr.shape[2])
    if channels not in (3, 4):
        raise ValueError(
            f"Expected 3 channels (RGB) or 4 channels (RGBA), but got {channels} channels."
        )

    # Normalize pixel values to uint8 for PIL.
    if np.issubdtype(arr.dtype, np.floating):
        arr = np.rint(np.clip(arr, 0.0, 1.0) * 255.0).astype(np.uint8)
    else:
        arr = np.clip(arr, 0, 255).astype(np.uint8)

    mode = "RGB" if channels == 3 else "RGBA"
    pil_image = Image.fromarray(arr, mode=mode)

    buffer = io.BytesIO()
    pil_image.save(buffer, format="PNG")
    return buffer.getvalue(), mode
144
+
145
+
146
+ def _text_to_bytes(text: str) -> bytes:
147
+ if text is None:
148
+ text = ""
149
+ return str(text).encode("utf-8")
150
+
151
+
152
def _build_split_text_files(text: str, filename: str, linebreaks_per_file: int):
    """Split *text* into a list of (name, chunk) pairs.

    A non-positive (or None) *linebreaks_per_file* yields one "<base>.txt"
    holding the whole text. Otherwise each chunk holds at most that many
    lines and is named "<base>_<start>_to_<end>.txt" with 1-based line
    numbers; empty text yields a single "<base>_1_to_1.txt" chunk.
    """
    base_name = _strip_known_suffixes(filename, [".zip", ".txt"])
    text = "" if text is None else str(text)

    if linebreaks_per_file is None:
        linebreaks_per_file = -1
    try:
        linebreaks_per_file = int(linebreaks_per_file)
    except Exception as exc:
        raise ValueError("linebreaks_per_file must be an integer.") from exc

    if linebreaks_per_file <= 0:
        return [(f"{base_name}.txt", text)]

    lines = text.splitlines()
    if not lines:
        return [(f"{base_name}_1_to_1.txt", "")]

    files = []
    total = len(lines)
    start = 0
    while start < total:
        stop = min(start + linebreaks_per_file, total)
        chunk_name = f"{base_name}_{start + 1}_to_{stop}.txt"
        files.append((chunk_name, "\n".join(lines[start:stop])))
        start = stop
    return files
181
+
182
+
183
def _zip_named_text_files(named_files):
    """Pack (arcname, text) pairs into an in-memory deflate-compressed zip."""
    buffer = io.BytesIO()
    with zipfile.ZipFile(buffer, mode="w", compression=zipfile.ZIP_DEFLATED) as archive:
        for arcname, contents in named_files:
            archive.writestr(arcname, _text_to_bytes(contents))
    return buffer.getvalue()
189
+
190
+
191
class _SaliaUploadBase:
    """Shared ComfyUI node metadata for all Salia upload nodes."""

    CATEGORY = CATEGORY  # node-browser menu location (module-level constant)
    OUTPUT_NODE = True  # treated as a workflow output node by ComfyUI
    # All three nodes return (path_in_repo, file_url, commit_url) strings.
    RETURN_TYPES = ("STRING", "STRING", "STRING")
    RETURN_NAMES = ("path_in_repo", "file_url", "commit_url")
196
+
197
+
198
class Salia_Upload_TMP_img(_SaliaUploadBase):
    """ComfyUI node: upload a single IMAGE as a PNG to the tmp Hub repo."""

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "img": ("IMAGE", {}),
                "hf_token": ("STRING", {"default": "", "multiline": False, "placeholder": "hf_..."}),
                "filename": ("STRING", {"default": "image", "multiline": False}),
            }
        }

    FUNCTION = "upload"
    DESCRIPTION = "Upload one RGB or RGBA image as a PNG to saliacoel/tmp on Hugging Face Hub."
    SEARCH_ALIASES = ["upload png to hf", "salia tmp image upload"]

    def upload(self, img, hf_token, filename):
        # Encode first so a bad tensor fails before any network call.
        png_name = _ensure_extension(filename, ".png")
        payload, mode = _tensor_to_png_bytes(img)
        message = f"ComfyUI upload {png_name} ({mode})"
        return _upload_bytes(payload, png_name, hf_token, message)
222
+
223
+
224
class Salia_Upload_TMP_txt(_SaliaUploadBase):
    """ComfyUI node: upload a text string as a UTF-8 .txt file to the tmp Hub repo."""

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "text": ("STRING", {"default": "", "multiline": True}),
                "hf_token": ("STRING", {"default": "", "multiline": False, "placeholder": "hf_..."}),
                "filename": ("STRING", {"default": "text", "multiline": False}),
            }
        }

    FUNCTION = "upload"
    DESCRIPTION = "Upload text as a UTF-8 .txt file to saliacoel/tmp on Hugging Face Hub."
    SEARCH_ALIASES = ["upload txt to hf", "salia tmp text upload"]

    def upload(self, text, hf_token, filename):
        txt_name = _ensure_extension(filename, ".txt")
        message = f"ComfyUI upload {txt_name}"
        return _upload_bytes(_text_to_bytes(text), txt_name, hf_token, message)
248
+
249
+
250
class Salia_Upload_TMP_split_txt_to_zip(_SaliaUploadBase):
    """ComfyUI node: split text into numbered .txt chunks and upload them as one .zip."""

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "text": ("STRING", {"default": "", "multiline": True}),
                "hf_token": ("STRING", {"default": "", "multiline": False, "placeholder": "hf_..."}),
                "filename": ("STRING", {"default": "text_bundle", "multiline": False}),
                "linebreaks_per_file": ("INT", {"default": -1, "min": -1, "max": 100000000, "step": 1}),
            }
        }

    FUNCTION = "upload"
    DESCRIPTION = "Split text into numbered .txt files, zip them, and upload the .zip to saliacoel/tmp on Hugging Face Hub."
    SEARCH_ALIASES = [
        "Salia_Upload_TMP_split_txt",
        "split txt to zip",
        "upload zip to hf",
    ]

    def upload(self, text, hf_token, filename, linebreaks_per_file):
        stem = _strip_known_suffixes(filename, [".zip", ".txt"])
        chunks = _build_split_text_files(text, stem, linebreaks_per_file)
        archive = _zip_named_text_files(chunks)
        message = f"ComfyUI upload {stem}.zip ({len(chunks)} files)"
        return _upload_bytes(archive, f"{stem}.zip", hf_token, message)
281
+
282
+
283
# Registration tables consumed by ComfyUI: internal node id -> node class.
NODE_CLASS_MAPPINGS = {
    "Salia_Upload_TMP_img": Salia_Upload_TMP_img,
    "Salia_Upload_TMP_txt": Salia_Upload_TMP_txt,
    "Salia_Upload_TMP_split_txt_to_zip": Salia_Upload_TMP_split_txt_to_zip,
}

# Display names intentionally mirror the internal ids, so the node search
# shows the exact class names.
NODE_DISPLAY_NAME_MAPPINGS = {
    "Salia_Upload_TMP_img": "Salia_Upload_TMP_img",
    "Salia_Upload_TMP_txt": "Salia_Upload_TMP_txt",
    "Salia_Upload_TMP_split_txt_to_zip": "Salia_Upload_TMP_split_txt_to_zip",
}

__all__ = ["NODE_CLASS_MAPPINGS", "NODE_DISPLAY_NAME_MAPPINGS"]