""" 3DGS Codebook Builder ===================== 使用 KMeans 对 3D Gaussian Splatting 模型的四类特征分别构建 codebook: - scale (3维) → 16384 个离散索引 - rotation (4维) → 16384 个离散索引 - DC (3维) → 4096 个离散索引 - SH rest (45维) → 4096 个离散索引 每个 codebook 单独保存为 .npz 文件,包含: - codebook : (K, D) float32 —— 聚类中心 - indices : (N,) int32 —— 每个高斯点对应的索引 """ import os import argparse import numpy as np from plyfile import PlyData from sklearn.cluster import MiniBatchKMeans import time # ───────────────────────────────────────────── # 1. PLY 读取 # ───────────────────────────────────────────── def read_ply(ply_path: str) -> dict: """读取 3DGS .ply 文件,返回各属性 numpy 数组。""" plydata = PlyData.read(ply_path) vertex = plydata['vertex'] positions = np.stack([vertex['x'], vertex['y'], vertex['z']], axis=1) # (N, 3) opacities = vertex['opacity'][:, np.newaxis] # (N, 1) scales = np.stack([vertex['scale_0'], vertex['scale_1'], vertex['scale_2']], axis=1) # (N, 3) rotations = np.stack([vertex['rot_0'], vertex['rot_1'], vertex['rot_2'], vertex['rot_3']], axis=1) # (N, 4) dc = np.stack([vertex['f_dc_0'], vertex['f_dc_1'], vertex['f_dc_2']], axis=1) # (N, 3) sh_keys = sorted( [k for k in vertex.data.dtype.names if k.startswith('f_rest_')], key=lambda s: int(s.split('_')[-1]) ) sh_rest = np.stack([vertex[k] for k in sh_keys], axis=1) \ if sh_keys else None # (N, 45) # filter_3D 是可选字段(部分版本有,部分没有) filter_3d = None if 'filter_3D' in vertex.data.dtype.names: filter_3d = vertex['filter_3D'][:, np.newaxis] # (N, 1) print(f"[read_ply] 读取完成:{positions.shape[0]} 个高斯点") if sh_rest is not None: print(f" SH rest 维度:{sh_rest.shape[1]} " f"(期望 45 = 15 球谐系数 × 3 通道)") return { 'positions': positions, 'opacities': opacities, 'scales': scales, 'rotations': rotations, 'dc': dc, 'sh_rest': sh_rest, 'filter_3d': filter_3d, 'plydata': plydata, } # ───────────────────────────────────────────── # 2. 
# ─────────────────────────────────────────────
# 2. KMeans clustering (MiniBatchKMeans for speed)
# ─────────────────────────────────────────────
def build_codebook(
    features: np.ndarray,
    n_clusters: int,
    name: str,
    random_state: int = 42,
    batch_size: int = 65536,
    max_iter: int = 300,
) -> tuple[np.ndarray, np.ndarray]:
    """
    Run MiniBatchKMeans over ``features`` (N, D) and return:
        codebook : (K, D) float32
        indices  : (N,)   int32
    """
    N, D = features.shape

    # If there are fewer points than clusters, cap K so every point
    # can be its own cluster.
    K = min(n_clusters, N)
    if K < n_clusters:
        print(f"[{name}] 警告:高斯点数 ({N}) < 目标 cluster 数 ({n_clusters}),"
              f"自动调整为 K={K}")

    print(f"[{name}] 开始 KMeans:N={N}, D={D}, K={K} ...")
    start = time.time()

    model = MiniBatchKMeans(
        n_clusters=K,
        batch_size=min(batch_size, N),  # batch cannot exceed the point count
        max_iter=max_iter,
        random_state=random_state,
        n_init=3,
        verbose=0,
    )
    model.fit(features.astype(np.float32))

    codebook = model.cluster_centers_.astype(np.float32)   # (K, D)
    indices = model.labels_.astype(np.int32)               # (N,)

    elapsed = time.time() - start
    print(f"[{name}] 完成!耗时 {elapsed:.1f}s | inertia={model.inertia_:.4f}")
    print(f"  codebook shape: {codebook.shape} | "
          f"索引范围: [{indices.min()}, {indices.max()}]")
    return codebook, indices


# ─────────────────────────────────────────────
# 3. Saving a single codebook
# ─────────────────────────────────────────────
def save_codebook(
    save_dir: str,
    name: str,
    codebook: np.ndarray,
    indices: np.ndarray,
) -> None:
    """Write codebook + indices to <save_dir>/<name>_codebook.npz."""
    os.makedirs(save_dir, exist_ok=True)
    out_path = os.path.join(save_dir, f"{name}_codebook.npz")
    np.savez_compressed(out_path, codebook=codebook, indices=indices)
    size_mb = os.path.getsize(out_path) / 1024 / 1024
    print(f"[{name}] 已保存 → {out_path} ({size_mb:.2f} MB)")
# ─────────────────────────────────────────────
# 4. Main pipeline
# ─────────────────────────────────────────────
CODEBOOK_CONFIG = {
    # name       n_clusters
    'scale':     16384,
    'rotation':  16384,
    'dc':        4096,
    'sh':        4096,
}


def build_all_codebooks(
    ply_path: str,
    save_dir: str,
    random_state: int = 42,
) -> dict:
    """
    Read the PLY, cluster the four feature groups independently, and save
    each codebook to its own file.

    Returns a dict:
        {
            'scale':    (codebook_array, indices_array),
            'rotation': ...,
            'dc':       ...,
            'sh':       ...,
        }
    """
    # ── Load data ───────────────────────────
    data = read_ply(ply_path)
    if data['sh_rest'] is None:
        raise ValueError("PLY 文件中未找到 f_rest_* 字段,无法构建 SH codebook。")

    feature_map = {
        'scale': data['scales'],        # (N, 3)
        'rotation': data['rotations'],  # (N, 4)
        'dc': data['dc'],               # (N, 3)
        # SH codebook is built on the 45-dim high-frequency part (DC removed).
        'sh': data['sh_rest'],          # (N, 45)
    }

    # ── Cluster and save each group ─────────
    results = {}
    banner = '=' * 55
    for name, target_k in CODEBOOK_CONFIG.items():
        feats = feature_map[name]
        print(f"\n{banner}")
        print(f"  构建 [{name}] codebook | 特征维度: {feats.shape[1]}"
              f" | 目标 K: {target_k}")
        print(f"{banner}")
        cb, idx = build_codebook(
            feats,
            n_clusters=target_k,
            name=name,
            random_state=random_state,
        )
        save_codebook(save_dir, name, cb, idx)
        results[name] = (cb, idx)

    print(f"\n{banner}")
    print("  所有 codebook 构建完毕!")
    print(f"  输出目录:{os.path.abspath(save_dir)}")
    print(f"{banner}")
    return results
# ─────────────────────────────────────────────
# 5. Evaluation: reconstruct features from the codebooks and report error
# ─────────────────────────────────────────────
def evaluate_codebooks(
    ply_path: str,
    save_dir: str,
) -> None:
    """
    Load the four saved codebooks, reconstruct each feature group, and
    print the per-group RMSE and maximum absolute error.

    Missing codebook files and feature groups absent from the PLY
    (e.g. no f_rest_* fields) are skipped instead of crashing.
    """
    data = read_ply(ply_path)
    feature_map = {
        'scale': data['scales'],
        'rotation': data['rotations'],
        'dc': data['dc'],
        'sh': data['sh_rest'],  # may be None if the PLY has no f_rest_* fields
    }

    print("\n[评估] 重建误差(RMSE):")
    for name in CODEBOOK_CONFIG:
        path = os.path.join(save_dir, f"{name}_codebook.npz")
        if not os.path.exists(path):
            print(f"  [{name}] 文件不存在,跳过")
            continue
        # Guard: original code crashed on None.astype when SH was absent.
        if feature_map[name] is None:
            continue

        npz = np.load(path)
        codebook = npz['codebook']   # (K, D)
        indices = npz['indices']     # (N,)

        original = feature_map[name].astype(np.float32)
        reconstructed = codebook[indices]   # (N, D) — gather centers by index
        rmse = np.sqrt(np.mean((original - reconstructed) ** 2))
        max_err = np.abs(original - reconstructed).max()
        print(f"  [{name:8s}] K={codebook.shape[0]:6d} D={codebook.shape[1]:3d}"
              f" RMSE={rmse:.6f} MaxErr={max_err:.6f}")


# ─────────────────────────────────────────────
# 6. CLI entry point
# ─────────────────────────────────────────────
def parse_args():
    """Parse command-line arguments for the codebook builder."""
    parser = argparse.ArgumentParser(
        description="为 3DGS .ply 文件构建四个 KMeans codebook"
    )
    # BUG FIX: argparse ignores `default=` on a required positional argument,
    # so the original default path was dead code and running without an
    # argument always failed. nargs='?' makes the positional optional so the
    # default actually applies; an explicit path still works as before.
    parser.add_argument('ply_path', type=str, nargs='?',
                        default="./merge/original_3dgs.ply",
                        help='输入的 3DGS .ply 文件路径')
    parser.add_argument('--save_dir', type=str, default='./codebooks',
                        help='codebook 保存目录(默认:./codebooks)')
    parser.add_argument('--seed', type=int, default=42,
                        help='随机种子(默认:42)')
    parser.add_argument('--evaluate', action='store_true',
                        help='构建完成后计算 RMSE 重建误差')
    return parser.parse_args()


if __name__ == '__main__':
    args = parse_args()
    build_all_codebooks(
        ply_path=args.ply_path,
        save_dir=args.save_dir,
        random_state=args.seed,
    )
    if args.evaluate:
        evaluate_codebooks(
            ply_path=args.ply_path,
            save_dir=args.save_dir,
        )