"""Batch render aligned scenes from different datasets: generate GLB files and render top/diagonal views.

This script processes the aligned unified-layout data (Z-up coordinate system).

Supports: scannet, arkitscenes, 3rscan, 3d-front

Usage:
    # Step 1: Generate all GLB files (use internscenes environment)
    conda activate internscenes
    python tools/post_process/render_aligned_scenes.py --dataset scannet --mode compose --workers 4

    # Step 2: Render all views (use blender environment)
    conda activate blender
    python tools/post_process/render_aligned_scenes.py --dataset scannet --mode render --workers 8

    # Process all datasets
    python tools/post_process/render_aligned_scenes.py --dataset all --mode render --workers 8
"""
|
|
| from __future__ import annotations |
|
|
| import argparse |
| import os |
| import sys |
| import json |
| import subprocess |
| from pathlib import Path |
| from concurrent.futures import ProcessPoolExecutor, as_completed |
| from multiprocessing import cpu_count |
|
|
try:
    from tqdm import tqdm
except ImportError:
    # Minimal stand-in used when tqdm is not installed.  It must support
    # every way this script uses tqdm:
    #   - wrapping an iterable:            for x in tqdm(items, desc=...)
    #   - context manager with a total:    with tqdm(total=n, desc=...) as pbar
    #   - pbar.update(k) and tqdm.write(msg)
    # The previous fallback was a generator function, so the
    # `with tqdm(total=...)` usage below crashed without tqdm installed.
    class tqdm:  # type: ignore[no-redef]
        def __init__(self, iterable=None, desc=None, total=None, **kwargs):
            if desc:
                print(f"{desc}...")
            self.iterable = iterable

        def __iter__(self):
            return iter(self.iterable)

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc, tb):
            # Never suppress exceptions raised inside the with-block.
            return False

        def update(self, n=1):
            # Progress display is a no-op in the fallback.
            pass

        @staticmethod
        def write(msg):
            print(msg)
|
|
| |
# Root of the aligned unified-layout data (Z-up), one subdirectory per dataset.
ALIGNED_LAYOUT_DIR = "/home/v-meiszhang/backup/datas/unified-layout-aligned"
# Asset library holding the uid_2_angle.json / uid_2_origin_cate.json metadata
# read by compose_one_scene.
ASSET_LIBRARY_DIR = "/home/v-meiszhang/backup/datas/InternScenes/asset_library"
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
# Repository root: two directory levels above this script.
REPO_ROOT = Path(SCRIPT_DIR).parent.parent
# Blender render script invoked as a subprocess by render_one_scene.
BLENDER_RENDERER = str(REPO_ROOT / "InternScenes" / "InternScenes_Real2Sim" / "blender_renderer.py")



# Datasets this script knows how to traverse (see get_all_scenes).
DATASETS = ["scannet", "3rscan", "arkitscenes", "3d-front"]
|
|
|
|
def get_all_scenes(dataset: str) -> list[tuple[str, str]]:
    """Get all scene names and their full paths for a dataset.

    Returns list of (scene_name, scene_dir) tuples.
    """
    dataset_dir = os.path.join(ALIGNED_LAYOUT_DIR, dataset)

    if not os.path.isdir(dataset_dir):
        print(f"Warning: Dataset directory not found: {dataset_dir}")
        return []

    def _has_layout(path: str) -> bool:
        # A scene directory is usable only if it contains a layout.json.
        return os.path.isdir(path) and os.path.exists(os.path.join(path, "layout.json"))

    found: list[tuple[str, str]] = []

    if dataset == "arkitscenes":
        # ARKitScenes is organised into Training/Validation splits.
        for split in ("Training", "Validation"):
            split_dir = os.path.join(dataset_dir, split)
            if not os.path.isdir(split_dir):
                continue
            for entry in sorted(os.listdir(split_dir)):
                entry_path = os.path.join(split_dir, entry)
                if _has_layout(entry_path):
                    found.append((f"{dataset}/{split}/{entry}", entry_path))
    elif dataset == "3d-front":
        # 3D-FRONT nests rooms one level deeper: <house-uuid>/<room>.
        for house in sorted(os.listdir(dataset_dir)):
            house_path = os.path.join(dataset_dir, house)
            if not os.path.isdir(house_path):
                continue
            for room in sorted(os.listdir(house_path)):
                room_path = os.path.join(house_path, room)
                if _has_layout(room_path):
                    found.append((f"{dataset}/{house}/{room}", room_path))
    else:
        # Flat layout: one directory per scene (scannet, 3rscan).
        for entry in sorted(os.listdir(dataset_dir)):
            entry_path = os.path.join(dataset_dir, entry)
            if _has_layout(entry_path):
                found.append((f"{dataset}/{entry}", entry_path))

    return found
|
|
|
|
def convert_unified_layout_to_instance_infos(layout_data: dict) -> list:
    """Convert the unified-layout format into the instance_infos format
    expected by compose_scenes.

    unified-layout format:
        functional_zones[].assets[]: {model_id/model_uid, transform: {pos, size, rot}}

    instance_infos format:
        [{model_uid, category, bbox: [cx, cy, cz, sx, sy, sz, rx, ry, rz]}]
    """
    import re

    # Matches a canonical 8-4-4-4-12 hex UUID at the start of the string.
    uuid_re = re.compile(
        r'^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}',
        re.IGNORECASE,
    )

    infos: list = []

    for zone in layout_data.get('functional_zones', []):
        for asset in zone.get('assets', []):
            uid = asset.get('model_id') or asset.get('model_uid', '')
            if not uid:
                # Assets without any model identifier cannot be composed.
                continue

            if uuid_re.match(uid):
                # IDs that start with a UUID are 3D-FUTURE assets: strip any
                # "-(...)" suffix (or keep just the five UUID groups) and
                # prefix the model directory.
                if '-(' in uid:
                    core = uid.split('-(')[0]
                else:
                    core = '-'.join(uid.split('-')[0:5])
                uid = f"3D-FUTURE-model/{core}"

            xform = asset.get('transform', {})
            pos = xform.get('pos', [0, 0, 0])
            size = xform.get('size', [1, 1, 1])
            rot = xform.get('rot', [0, 0, 0])

            infos.append({
                'model_uid': uid,
                'category': asset.get('category', 'unknown'),
                'bbox': [
                    pos[0], pos[1], pos[2],
                    size[0], size[1], size[2],
                    rot[0], rot[1], rot[2],
                ],
            })

    return infos
|
|
|
|
def compose_one_scene(args: tuple[str, str, bool]) -> tuple[str, bool, str]:
    """Compose GLB for a single scene. Returns (scene_name, success, message).

    Args are packed into a single tuple so the function can be submitted
    directly to ProcessPoolExecutor: (full_scene_name, scene_dir, skip_existing).
    """
    full_scene_name, scene_dir, skip_existing = args

    # Resume support: a non-empty existing GLB counts as already done.
    glb_path = os.path.join(scene_dir, "glb_scene.glb")
    if skip_existing and os.path.exists(glb_path) and os.path.getsize(glb_path) > 0:
        return full_scene_name, True, "SKIPPED (already exists)"

    try:
        # Make the Real2Sim package importable in this worker process.
        # Guard against duplicates: this function runs many times per worker,
        # and an unconditional insert grows sys.path without bound.
        real2sim_dir = str(REPO_ROOT / "InternScenes" / "InternScenes_Real2Sim")
        if real2sim_dir not in sys.path:
            sys.path.insert(0, real2sim_dir)
        from compose_scenes import SceneComposer, AssetMeshLoader

        class CustomAssetMeshLoader(AssetMeshLoader):
            """AssetMeshLoader wired to the local ASSET_LIBRARY_DIR."""

            def __init__(self):
                self.asset_dir = ASSET_LIBRARY_DIR
                # Context managers close the metadata files promptly
                # (json.load(open(...)) left the handles dangling).
                with open(os.path.join(ASSET_LIBRARY_DIR, "uid_2_angle.json")) as f:
                    self.obja_uid_2_rotation = json.load(f)
                with open(os.path.join(ASSET_LIBRARY_DIR, "uid_2_origin_cate.json")) as f:
                    self.pm_uid_2_origin_cate = json.load(f)

        loader = CustomAssetMeshLoader()
        composer = SceneComposer(asset_mesh_loader=loader)

        # Load the unified-layout description of the scene.
        layout_path = os.path.join(scene_dir, "layout.json")
        with open(layout_path) as f:
            layout_data = json.load(f)

        # Convert to the instance_infos format compose_scenes expects.
        instance_infos = convert_unified_layout_to_instance_infos(layout_data)

        if not instance_infos:
            return full_scene_name, False, "No assets found in layout"

        os.makedirs(scene_dir, exist_ok=True)

        composer.compose_scene_from_instance_infos(
            instance_infos,
            glb_path,
            use_texture=True,
            bbox_data_key="bbox"
        )

        return full_scene_name, True, f"OK ({len(instance_infos)} assets)"
    except Exception as e:
        # Return the traceback in the message so the parent process can log it.
        import traceback
        return full_scene_name, False, f"{str(e)}\n{traceback.format_exc()}"
|
|
|
|
def compose_all_scenes(scenes: list[tuple[str, str]], max_workers: int = 4, skip_existing: bool = False) -> None:
    """Compose GLB files for all scenes using multiprocessing."""
    print(f"Composing {len(scenes)} scenes with {max_workers} workers...")
    if skip_existing:
        print(" (skipping scenes with existing GLB files)")

    # Per-outcome tallies reported at the end of the run.
    totals = {"ok": 0, "skipped": 0, "failed": 0}

    # Attach the skip flag so each task tuple is self-contained.
    tasks = [(name, path, skip_existing) for name, path in scenes]

    with ProcessPoolExecutor(max_workers=max_workers) as pool:
        pending = {pool.submit(compose_one_scene, task): task[0] for task in tasks}

        with tqdm(total=len(pending), desc="Composing") as bar:
            for done in as_completed(pending):
                name, ok, message = done.result()
                if not ok:
                    totals["failed"] += 1
                    # Truncate long tracebacks so the progress log stays readable.
                    tqdm.write(f"Failed: {name} - {message[:200]}")
                elif "SKIPPED" in message:
                    totals["skipped"] += 1
                else:
                    totals["ok"] += 1
                bar.update(1)

    print(f"\nCompose: {totals['ok']} success, {totals['skipped']} skipped, {totals['failed']} failed")
|
|
|
|
def render_one_scene(args: tuple[str, str, bool]) -> tuple[str, bool, str]:
    """Render top and diagonal views for a single scene."""
    full_scene_name, scene_dir, skip_existing = args
    try:
        glb_path = os.path.join(scene_dir, "glb_scene.glb")
        topdown_output = os.path.join(scene_dir, "render_topdown.png")
        diagonal_output = os.path.join(scene_dir, "render_diagonal.png")

        def _rendered(path: str) -> bool:
            # A view counts as rendered only if the PNG exists and is non-empty.
            return os.path.exists(path) and os.path.getsize(path) > 0

        if skip_existing and _rendered(topdown_output) and _rendered(diagonal_output):
            return full_scene_name, True, "SKIPPED"

        # Options shared by both render passes.
        shared = [
            sys.executable, BLENDER_RENDERER,
            "--input", glb_path,
            "--scene-y-up",
            "--engine", "BLENDER_EEVEE",
            "--samples", "128",
            "--auto-crop",
            "--crop-padding", "20",
        ]

        # (label, output path, view-specific options) for each pass, in order.
        passes = [
            ("Topdown", topdown_output,
             ["--view-mode", "topdown",
              "--topdown-height", "1.5",
              "--topdown-scale", "1.2"]),
            ("Diagonal", diagonal_output,
             ["--view-mode", "diagonal",
              "--diagonal-distance", "1.2",
              "--diagonal-height-offset", "0.1"]),
        ]

        for label, output_path, extra in passes:
            cmd = shared + ["--output", output_path] + extra
            proc = subprocess.run(cmd, capture_output=True, text=True, timeout=120)
            if proc.returncode != 0:
                return full_scene_name, False, f"{label} render failed: {proc.stderr[:200]}"

        return full_scene_name, True, "OK"
    except subprocess.TimeoutExpired:
        return full_scene_name, False, "Timeout"
    except Exception as e:
        return full_scene_name, False, str(e)
|
|
|
|
def render_all_scenes(scenes: list[tuple[str, str]], max_workers: int = 4, skip_existing: bool = False) -> None:
    """Render all scenes that have GLB files using multiprocessing."""
    print(f"Checking {len(scenes)} scenes for rendering...")

    # Filter down to scenes with a usable (non-empty) GLB, tracking why the
    # others were dropped so the summary below is informative.
    to_render: list[tuple[str, str]] = []
    counts = {"existing": 0, "empty": 0, "missing": 0}

    for name, scene_dir in scenes:
        glb_path = os.path.join(scene_dir, "glb_scene.glb")
        if not os.path.exists(glb_path):
            counts["missing"] += 1
            continue
        if os.path.getsize(glb_path) == 0:
            counts["empty"] += 1
            continue
        if skip_existing:
            top_png = os.path.join(scene_dir, "render_topdown.png")
            diag_png = os.path.join(scene_dir, "render_diagonal.png")
            if os.path.exists(top_png) and os.path.exists(diag_png):
                counts["existing"] += 1
                continue
        to_render.append((name, scene_dir))

    print(f"Found {len(to_render)} scenes to render")
    if counts["existing"] > 0:
        print(f" Skipped {counts['existing']} already rendered scenes")
    if counts["empty"] > 0:
        print(f" Skipped {counts['empty']} empty/corrupted GLB files")
    if counts["missing"] > 0:
        print(f" Skipped {counts['missing']} missing GLB files")
    print(f"Using {max_workers} workers")

    if not to_render:
        print("No scenes to render!")
        return

    ok_total = 0
    fail_total = 0

    with ProcessPoolExecutor(max_workers=max_workers) as pool:
        pending = {
            pool.submit(render_one_scene, (name, scene_dir, skip_existing)): name
            for name, scene_dir in to_render
        }

        with tqdm(total=len(pending), desc="Rendering") as bar:
            for done in as_completed(pending):
                name, ok, message = done.result()
                if ok:
                    ok_total += 1
                else:
                    fail_total += 1
                    tqdm.write(f"Failed: {name} - {message}")
                bar.update(1)

    print(f"\nRender: {ok_total} success, {fail_total} failed")
|
|
|
|
def main():
    """CLI entry point: parse arguments, then compose and/or render each dataset."""
    default_workers = max(1, cpu_count() // 2)

    parser = argparse.ArgumentParser(description="Batch process aligned scenes from different datasets")
    parser.add_argument(
        "--dataset",
        choices=DATASETS + ["all"],
        default="scannet",
        help="Dataset to process (default: scannet)",
    )
    parser.add_argument(
        "--mode",
        choices=["compose", "render", "all"],
        default="all",
        help="compose: generate GLB files, render: render views, all: both",
    )
    parser.add_argument(
        "--workers",
        type=int,
        default=default_workers,
        help=f"Number of parallel workers (default: {default_workers})",
    )
    parser.add_argument(
        "--limit",
        type=int,
        default=None,
        help="Limit number of scenes to process (for testing)",
    )
    parser.add_argument(
        "--start",
        type=int,
        default=0,
        help="Start index for scenes (for resuming)",
    )
    parser.add_argument(
        "--skip-existing",
        action="store_true",
        default=False,
        help="Skip scenes that already have GLB/render files (for resuming)",
    )
    args = parser.parse_args()

    targets = DATASETS if args.dataset == "all" else [args.dataset]

    for dataset in targets:
        banner = "=" * 60
        print(f"\n{banner}")
        print(f"Processing dataset: {dataset}")
        print(banner)

        all_scenes = get_all_scenes(dataset)
        print(f"Found {len(all_scenes)} scenes")

        if not all_scenes:
            print(f"No scenes found for {dataset}, skipping...")
            continue

        # Apply the --start/--limit window for resumable / partial runs.
        scenes = all_scenes[args.start:]
        if args.limit:
            scenes = scenes[:args.limit]

        if args.start > 0 or args.limit:
            print(f"Processing scenes {args.start} to {args.start + len(scenes)}")

        if args.mode in ("compose", "all"):
            print("\n=== Step 1: Composing GLB files ===")
            compose_all_scenes(scenes, max_workers=args.workers, skip_existing=args.skip_existing)

        if args.mode in ("render", "all"):
            print("\n=== Step 2: Rendering views ===")
            render_all_scenes(scenes, max_workers=args.workers, skip_existing=args.skip_existing)

    print("\nAll done!")
|
|
|
|
# Script entry point: run the CLI only when executed directly, not on import.
if __name__ == "__main__":
    main()
|
|