Upload parquet_to_raw.py with huggingface_hub

parquet_to_raw.py (ADDED, +117 -0)
@@ -0,0 +1,117 @@
"""
Convert a parquet-format HuggingFace dataset back to raw image files.

Input: an HF dataset repo ID, or a local directory saved with save_to_disk()
Output: GSD/{split}/{image,mask,reflections,edge}/<image_id>.png + metadata.jsonl
"""
import argparse
import json
from pathlib import Path

from datasets import load_dataset, load_from_disk
from huggingface_hub import HfApi

# split -> (has_reflections, has_edge)
SPLITS = {
    "train": (True, True),
    "extra": (False, True),
    "test": (False, False),
}


def save_split(ds, split: str, has_reflections: bool, has_edge: bool, out_dir: Path):
    split_dir = out_dir / split
    img_dir = split_dir / "image"; img_dir.mkdir(parents=True, exist_ok=True)
    mask_dir = split_dir / "mask"; mask_dir.mkdir(parents=True, exist_ok=True)
    refl_dir = split_dir / "reflections" if has_reflections else None
    edge_dir = split_dir / "edge" if has_edge else None
    if refl_dir: refl_dir.mkdir(parents=True, exist_ok=True)
    if edge_dir: edge_dir.mkdir(parents=True, exist_ok=True)

    rows = []
    for i, sample in enumerate(ds):
        # Prefer the dataset's own image_id; fall back to a zero-padded index.
        stem = sample.get("image_id") or f"{i:06d}"

        img_path = img_dir / f"{stem}.png"
        mask_path = mask_dir / f"{stem}.png"
        sample["image"].save(img_path)
        sample["mask"].save(mask_path)

        refl_rel = edge_rel = ""
        if has_reflections and sample.get("reflections") is not None:
            p = refl_dir / f"{stem}.png"
            sample["reflections"].save(p)
            refl_rel = f"reflections/{p.name}"
        if has_edge and sample.get("edge") is not None:
            p = edge_dir / f"{stem}.png"
            sample["edge"].save(p)
            edge_rel = f"edge/{p.name}"

        # All paths are relative to the split directory; "file_name" follows
        # the imagefolder metadata convention.
        rows.append({
            "file_name": f"image/{img_path.name}",
            "mask": f"mask/{mask_path.name}",
            "reflections": refl_rel,
            "edge": edge_rel,
        })

        if (i + 1) % 100 == 0:
            print(f"  {i + 1}/{len(ds)}")

    (split_dir / "metadata.jsonl").write_text(
        "\n".join(json.dumps(r) for r in rows) + "\n"  # newline-terminate the last record
    )
    print(f"  saved {len(rows)} samples -> {split_dir}")


def main():
    parser = argparse.ArgumentParser(description="Convert parquet dataset to raw images")
    src = parser.add_mutually_exclusive_group(required=True)
    src.add_argument("--repo", help="HuggingFace repo ID, e.g. garrying/GSD")
    src.add_argument("--local", help="Path to a directory saved with save_to_disk()")
    parser.add_argument("--out", default="GSD", help="Output root directory (default: ./GSD)")
    parser.add_argument("--splits", nargs="+", choices=list(SPLITS), default=list(SPLITS),
                        help="Which splits to convert (default: all)")
    parser.add_argument("--upload-to", metavar="REPO_ID",
                        help="Upload raw files to this HF dataset repo after conversion, e.g. garrying/GSD-raw")
    args = parser.parse_args()

    out_dir = Path(args.out)
    out_dir.mkdir(parents=True, exist_ok=True)

    for split in args.splits:
        has_reflections, has_edge = SPLITS[split]
        print(f"\nLoading {split}...")

        if args.repo:
            ds = load_dataset(args.repo, split=split)
        else:
            ds = load_from_disk(str(Path(args.local) / split))

        print(f"  {len(ds)} samples — saving images...")
        save_split(ds, split, has_reflections, has_edge, out_dir)

    print(f"\nDone! Raw files in: {out_dir.resolve()}")

    if args.upload_to:
        api = HfApi()
        api.create_repo(args.upload_to, repo_type="dataset", exist_ok=True)
        for split in args.splits:
            print(f"\nUploading {split} -> {args.upload_to}...")
            # upload_folder skips *.jsonl here; metadata.jsonl is pushed
            # explicitly right after, as its own upload.
            api.upload_folder(
                folder_path=str(out_dir / split),
                path_in_repo=split,
                repo_id=args.upload_to,
                repo_type="dataset",
                ignore_patterns=["*.jsonl"],
            )
            api.upload_file(
                path_or_fileobj=(out_dir / split / "metadata.jsonl").read_bytes(),
                path_in_repo=f"{split}/metadata.jsonl",
                repo_id=args.upload_to,
                repo_type="dataset",
            )
        print(f"\nUploaded! https://huggingface.co/datasets/{args.upload_to}")


if __name__ == "__main__":
    main()
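For reference, a typical end-to-end invocation, using the example repo IDs from the script's own --help text, would be:

    python parquet_to_raw.py --repo garrying/GSD --out GSD --upload-to garrying/GSD-raw

A quick way to verify the export is to read back a record from a split's metadata.jsonl and open the files it points to. A minimal sketch, not part of the commit, assuming the GSD/train layout the script produces and that Pillow is installed:

    import json
    from pathlib import Path

    from PIL import Image

    split_dir = Path("GSD/train")
    # Each line of metadata.jsonl is one record; all paths inside it are
    # relative to the split directory.
    first = json.loads((split_dir / "metadata.jsonl").read_text().splitlines()[0])
    image = Image.open(split_dir / first["file_name"])
    mask = Image.open(split_dir / first["mask"])
    print(image.size, mask.size)
    # "reflections"/"edge" are empty strings when the split lacks those files.
    print(first["reflections"] or "(no reflections)", first["edge"] or "(no edge)")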