| import numpy as np
|
| from plyfile import PlyData, PlyElement
|
| from sklearn.cluster import AgglomerativeClustering
|
| from scipy.spatial.transform import Rotation as R
|
| import os
|
|
|
|
|
def read_ply(ply_path):
    """Read a 3DGS-style .ply file into a dict of numpy arrays.

    Parameters
    ----------
    ply_path : path to the PLY file; must contain a 'vertex' element with
        the standard 3DGS properties (x/y/z, opacity, scale_*, rot_*,
        f_dc_*, optionally f_rest_*).

    Returns
    -------
    dict with keys:
        'positions' (N, 3), 'opacities' (N, 1), 'scales' (N, 3),
        'rotations' (N, 4), 'dc' (N, 3),
        'sh_rest' (N, K) or None when no f_rest_* properties exist,
        'plydata' — the raw PlyData object.

    NOTE(review): values are returned exactly as stored; 3DGS files
    typically keep opacity as a logit and scales in log space — confirm
    what downstream consumers expect.
    """
    plydata = PlyData.read(ply_path)
    vertex = plydata['vertex']

    def columns(names):
        # Stack the named per-vertex properties into an (N, len(names)) array.
        return np.stack([vertex[name] for name in names], axis=1)

    positions = columns(['x', 'y', 'z'])
    opacities = vertex['opacity'][:, np.newaxis]
    scales = columns(['scale_0', 'scale_1', 'scale_2'])
    rotations = columns(['rot_0', 'rot_1', 'rot_2', 'rot_3'])
    dc = columns(['f_dc_0', 'f_dc_1', 'f_dc_2'])

    # Higher-order SH coefficients are optional; keep file property order.
    rest_names = [name for name in vertex.data.dtype.names
                  if name.startswith('f_rest_')]
    sh_rest = columns(rest_names) if rest_names else None

    return {
        'positions': positions,
        'opacities': opacities,
        'scales': scales,
        'rotations': rotations,
        'dc': dc,
        'sh_rest': sh_rest,
        'plydata': plydata
    }
|
|
|
|
|
def quaternion_to_rotation_matrix(q):
    """Convert a 3DGS quaternion to a 3x3 rotation matrix.

    3DGS .ply files store rotations scalar-first ([w, x, y, z] in
    rot_0..rot_3), while scipy's ``Rotation.from_quat`` expects
    scalar-last ([x, y, z, w]).

    Fix vs. original: the old try/except never actually switched
    conventions — ``from_quat`` does not raise for a reordered quaternion
    (only for a zero-norm one), so the first branch always ran and
    scalar-first input was silently misread. The reorder is now explicit,
    and the bare ``except:`` is gone.

    Parameters
    ----------
    q : array-like of 4 floats, scalar-first [w, x, y, z].

    Returns
    -------
    (3, 3) ndarray rotation matrix.

    Raises
    ------
    ValueError if the quaternion has zero norm.
    """
    q = np.asarray(q, dtype=float)
    # Reorder scalar-first -> scalar-last; from_quat normalizes internally.
    return R.from_quat([q[1], q[2], q[3], q[0]]).as_matrix()
|
|
|
|
|
def compute_covariance(rotation, scale):
    """Build the 3x3 covariance of one gaussian: Σ = R S Sᵀ Rᵀ.

    ``rotation`` is the length-4 quaternion decoded by
    quaternion_to_rotation_matrix; ``scale`` supplies the three per-axis
    scales forming the diagonal matrix S.
    """
    basis = quaternion_to_rotation_matrix(rotation)
    stretch = np.diag(scale)
    return basis @ stretch @ stretch.T @ basis.T
|
|
|
|
|
def covariance_to_rotation_scale(cov):
    """Decompose a covariance into a scalar-first quaternion and scales.

    Eigen-decomposition Σ = V Λ Vᵀ yields the rotation basis V and the
    per-axis scales sqrt(Λ).

    Fix vs. original: ``Rotation.as_quat()`` returns scalar-last
    [x, y, z, w], but the result is stored into rot_0..rot_3, which 3DGS
    reads scalar-first [w, x, y, z]. The quaternion is now rolled so it
    round-trips with quaternion_to_rotation_matrix.

    Parameters
    ----------
    cov : (3, 3) symmetric covariance matrix.

    Returns
    -------
    (rotation, scale) : (4,) scalar-first quaternion and (3,) scales.
    """
    eigenvalues, eigenvectors = np.linalg.eigh(cov)

    # Clamp tiny negative eigenvalues caused by numerical noise.
    eigenvalues = np.maximum(eigenvalues, 1e-7)

    scale = np.sqrt(eigenvalues)

    # eigh may return a reflection (det = -1); eigenvector signs are
    # arbitrary, so flip one axis to obtain a proper rotation.
    if np.linalg.det(eigenvectors) < 0:
        eigenvectors[:, 0] *= -1

    quat_xyzw = R.from_matrix(eigenvectors).as_quat()
    rotation = np.roll(quat_xyzw, 1)  # [x, y, z, w] -> [w, x, y, z]

    return rotation, scale
|
|
|
|
|
def dc_to_rgb(dc):
    """Map 0th-order SH (DC) coefficients to RGB in [0, 1].

    color = C0 * dc + 0.5, clamped to the valid range, where C0 is the
    0th-order spherical-harmonics basis constant.
    """
    SH_C0 = 0.28209479177387814
    return np.clip(dc * SH_C0 + 0.5, 0, 1)
|
|
|
|
|
def rgb_to_dc(rgb):
    """Invert dc_to_rgb: map RGB back to 0th-order SH (DC) coefficients."""
    SH_C0 = 0.28209479177387814
    return (rgb - 0.5) / SH_C0
|
|
|
|
|
def build_octree(positions, max_points=5000):
    """Partition point indices into axis-aligned octree leaf cells.

    Recursively splits the bounding box of ``positions`` until each cell
    holds at most ``max_points`` points or a maximum depth is reached
    (the depth cap guards against many coincident points).

    Fix vs. original: child membership used a half-open interval test
    (``point < sub_max``), so any point lying exactly on a parent cell's
    max face — always including the point(s) at the global bbox corner —
    fell outside every child and was silently dropped. Points are now
    routed to exactly one octant by comparing against the cell center,
    so the children always partition the parent's index set.

    Parameters
    ----------
    positions : (N, 3) array of point coordinates.
    max_points : leaf capacity (a depth-capped leaf may exceed it).

    Returns
    -------
    list of dicts, each with 'indices', 'bbox_min', 'bbox_max'.
    """
    cells = []

    def subdivide(indices, bbox_min, bbox_max, depth=0):
        # Leaf: small enough, or too deep to keep splitting.
        if len(indices) <= max_points or depth > 10:
            cells.append({
                'indices': indices,
                'bbox_min': bbox_min,
                'bbox_max': bbox_max
            })
            return

        center = (bbox_min + bbox_max) / 2

        # Octant id from the three "at or above center" flags:
        # bit0 = x, bit1 = y, bit2 = z. Every point lands in exactly one octant.
        above = positions[indices] >= center
        octant = (above[:, 0].astype(int)
                  + (above[:, 1].astype(int) << 1)
                  + (above[:, 2].astype(int) << 2))

        for i in range(8):
            sub_indices = indices[octant == i]
            if len(sub_indices) == 0:
                continue
            flags = np.array([i & 1, (i >> 1) & 1, (i >> 2) & 1], dtype=bool)
            sub_min = np.where(flags, center, bbox_min)
            sub_max = np.where(flags, bbox_max, center)
            subdivide(sub_indices, sub_min, sub_max, depth + 1)

    bbox_min = positions.min(axis=0)
    bbox_max = positions.max(axis=0)
    subdivide(np.arange(len(positions)), bbox_min, bbox_max)

    return cells
|
|
|
|
|
def cluster_and_merge_cell(data, cell_indices, bbox_min, bbox_max):
    """Cluster the gaussians inside one octree cell and merge each cluster.

    Gaussians are grouped by agglomerative (ward) clustering on
    cell-normalized position + RGB features (weighted 0.8 / 0.2), then
    each cluster is collapsed to a single gaussian: opacity*volume
    weighted means for position / DC / SH, and a moment-matched
    covariance Σ = Σ_i w_i (Σ_i + d_i d_iᵀ) converted back to a
    rotation and scales.

    Fixes vs. original:
    - A (near-)zero total cluster weight divided by zero and produced
      NaN/inf gaussians; it now falls back to uniform weights.
    - ``data['sh_rest'][cell_indices]`` was re-sliced inside the cluster
      loop; it is hoisted out.

    NOTE(review): stored 3DGS opacities are usually logits and scales
    log-space, in which case these weights can be negative and the final
    clip of opacity to [0, 1] is questionable — confirm the activation
    convention of the input file.

    Returns a dict of merged arrays (roughly a 4:1 reduction), or None
    when the cell holds fewer than 4 gaussians.
    """
    if len(cell_indices) < 4:
        return None

    # Target a fixed 4:1 reduction per cell.
    n_clusters = max(1, len(cell_indices) // 4)

    positions = data['positions'][cell_indices]
    dc = data['dc'][cell_indices]
    opacities = data['opacities'][cell_indices]
    scales = data['scales'][cell_indices]
    rotations = data['rotations'][cell_indices]
    sh_rest_cell = (data['sh_rest'][cell_indices]
                    if data['sh_rest'] is not None else None)

    # Normalize positions to the cell box so they are comparable with RGB.
    cell_size = np.maximum(bbox_max - bbox_min, 1e-6)
    norm_positions = (positions - bbox_min) / cell_size

    rgb = dc_to_rgb(dc)

    # sqrt of the weights so squared euclidean distance (ward) mixes
    # 0.8 position / 0.2 color.
    features = np.concatenate([
        norm_positions * np.sqrt(0.8),
        rgb * np.sqrt(0.2)
    ], axis=1)

    labels = AgglomerativeClustering(
        n_clusters=n_clusters,
        linkage='ward'
    ).fit_predict(features)

    merged_data = {
        'positions': [],
        'opacities': [],
        'scales': [],
        'rotations': [],
        'dc': [],
        'sh_rest': [] if sh_rest_cell is not None else None
    }

    for cluster_id in range(n_clusters):
        cluster_indices = np.where(labels == cluster_id)[0]
        if len(cluster_indices) == 0:
            continue

        # Weight each member by opacity * volume (product of scales).
        volumes = scales[cluster_indices].prod(axis=1, keepdims=True)
        weights = opacities[cluster_indices] * volumes
        weights_sum = weights.sum()
        if abs(weights_sum) < 1e-12:
            # Degenerate (weights cancel or vanish): fall back to uniform
            # weights instead of dividing by ~0.
            normalized_weights = np.full_like(weights, 1.0 / len(cluster_indices))
        else:
            normalized_weights = weights / weights_sum

        merged_position = (positions[cluster_indices] * normalized_weights).sum(axis=0)
        merged_dc = (dc[cluster_indices] * normalized_weights).sum(axis=0)
        if sh_rest_cell is not None:
            merged_sh_rest = (sh_rest_cell[cluster_indices] * normalized_weights).sum(axis=0)

        # Moment matching: each member contributes its own covariance plus
        # the spread of its mean about the merged mean.
        merged_cov = np.zeros((3, 3))
        for w, idx in zip(normalized_weights[:, 0], cluster_indices):
            cov = compute_covariance(rotations[idx], scales[idx])
            diff = positions[idx] - merged_position
            merged_cov += w * (cov + np.outer(diff, diff))

        merged_rotation, merged_scale = covariance_to_rotation_scale(merged_cov)

        # Conserve total opacity mass: opacity ≈ Σ(opacity_i·vol_i) / merged vol.
        merged_volume = merged_scale.prod()
        if merged_volume > 1e-10:
            merged_opacity = weights_sum / merged_volume
        else:
            merged_opacity = opacities[cluster_indices].mean()
        merged_opacity = np.clip(merged_opacity, 0, 1)

        merged_data['positions'].append(merged_position)
        merged_data['opacities'].append(merged_opacity)
        merged_data['scales'].append(merged_scale)
        merged_data['rotations'].append(merged_rotation)
        merged_data['dc'].append(merged_dc)
        if sh_rest_cell is not None:
            merged_data['sh_rest'].append(merged_sh_rest)

    # Stack per-cluster results into arrays (sh_rest may stay None).
    for key in merged_data:
        if merged_data[key] is not None and len(merged_data[key]) > 0:
            merged_data[key] = np.array(merged_data[key])

    return merged_data
|
|
|
|
|
def merge_gaussians(ply_path, output_path):
    """Pipeline entry point: read a 3DGS .ply, merge gaussians, save result.

    Steps: read -> octree partition -> per-cell clustering/merging ->
    concatenate -> write.

    Fixes vs. original:
    - An empty input cloud crashed later inside build_octree; it now
      raises a clear ValueError up front.
    - When no cell could be merged (every cell < 4 gaussians) the code
      died with a bare KeyError on ``final_data['positions']``; that case
      also raises ValueError now.

    Parameters
    ----------
    ply_path : input .ply path.
    output_path : destination .ply path.
    """
    print("读取PLY文件...")
    data = read_ply(ply_path)
    n_original = len(data['positions'])
    print(f"原始高斯点数: {n_original}")
    if n_original == 0:
        raise ValueError("input ply contains no gaussians")

    print("构建八叉树...")
    cells = build_octree(data['positions'], max_points=5000)
    print(f"划分为 {len(cells)} 个cells")

    print("对每个cell进行聚类和合并...")
    all_merged_data = {
        'positions': [],
        'opacities': [],
        'scales': [],
        'rotations': [],
        'dc': [],
        'sh_rest': [] if data['sh_rest'] is not None else None
    }

    for i, cell in enumerate(cells):
        if i % 100 == 0:
            print(f"处理进度: {i}/{len(cells)}")

        merged = cluster_and_merge_cell(
            data,
            cell['indices'],
            cell['bbox_min'],
            cell['bbox_max']
        )

        if merged is not None:
            for key in all_merged_data:
                if all_merged_data[key] is not None and len(merged[key]) > 0:
                    all_merged_data[key].append(merged[key])

    print("合并所有cell的结果...")
    final_data = {}
    for key in all_merged_data:
        if all_merged_data[key] is not None and len(all_merged_data[key]) > 0:
            final_data[key] = np.concatenate(all_merged_data[key], axis=0)

    if 'positions' not in final_data:
        raise ValueError("no gaussians were merged; input may be too sparse")

    n_merged = len(final_data['positions'])
    print(f"合并后高斯点数: {n_merged}")
    print(f"压缩率: {n_merged/n_original*100:.2f}%")

    print("保存PLY文件...")
    save_ply(final_data, data['plydata'], output_path)
    print(f"已保存到: {output_path}")
|
|
|
|
|
def save_ply(merged_data, original_plydata, output_path):
    """Write merged gaussian attributes back out as a 3DGS-style .ply.

    Parameters
    ----------
    merged_data : dict with 'positions', 'opacities', 'scales',
        'rotations', 'dc' arrays and optional 'sh_rest'.
    original_plydata : the source PlyData (currently unused; kept for
        interface compatibility).
    output_path : destination file path.

    All properties are written as float32 ('f4') in the standard 3DGS
    order: x/y/z, opacity, scale_*, rot_*, f_dc_*, f_rest_*.
    """
    n_points = len(merged_data['positions'])

    # Build name -> column mapping; insertion order defines property order.
    columns = {
        'x': merged_data['positions'][:, 0],
        'y': merged_data['positions'][:, 1],
        'z': merged_data['positions'][:, 2],
        'opacity': merged_data['opacities'].flatten(),
    }
    for i in range(3):
        columns[f'scale_{i}'] = merged_data['scales'][:, i]
    for i in range(4):
        columns[f'rot_{i}'] = merged_data['rotations'][:, i]
    for i in range(3):
        columns[f'f_dc_{i}'] = merged_data['dc'][:, i]

    # Optional higher-order SH coefficients.
    if merged_data['sh_rest'] is not None:
        for i in range(merged_data['sh_rest'].shape[1]):
            columns[f'f_rest_{i}'] = merged_data['sh_rest'][:, i]

    vertex_data = np.empty(n_points, dtype=[(name, 'f4') for name in columns])
    for name, values in columns.items():
        vertex_data[name] = values

    vertex_element = PlyElement.describe(vertex_data, 'vertex')
    PlyData([vertex_element]).write(output_path)
|
|
|
|
|
|
|
if __name__ == "__main__":
    # Demo entry point: merge the default input file in place.
    source_path = "input.ply"
    destination_path = "output_merged.ply"
    merge_gaussians(source_path, destination_path)