SlekLi commited on
Commit
d2a9a7f
·
verified ·
1 Parent(s): f7596ef

Upload merge_gs.py

Browse files
Files changed (1) hide show
  1. merge_gs.py +378 -0
merge_gs.py ADDED
@@ -0,0 +1,378 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ from plyfile import PlyData, PlyElement
3
+ from sklearn.cluster import AgglomerativeClustering
4
+ from scipy.spatial.transform import Rotation as R
5
+ import os
6
+
7
+
8
def read_ply(ply_path):
    """Load a 3DGS .ply file into a dict of numpy arrays.

    Returns a dict with keys 'positions' (N,3), 'opacities' (N,1),
    'scales' (N,3), 'rotations' (N,4), 'dc' (N,3), 'sh_rest'
    (N,K or None when the file has no f_rest_* properties), plus the
    raw 'plydata' handle kept for later re-saving.
    """
    plydata = PlyData.read(ply_path)
    vertex = plydata['vertex']

    def stack(*names):
        # Column-stack the named vertex properties into an (N, k) array.
        return np.stack([vertex[name] for name in names], axis=1)

    # Higher-order SH coefficients are optional (f_rest_* properties).
    rest_names = [n for n in vertex.data.dtype.names if n.startswith('f_rest_')]
    sh_rest = stack(*rest_names) if rest_names else None

    return {
        'positions': stack('x', 'y', 'z'),
        'opacities': vertex['opacity'][:, np.newaxis],
        'scales': stack('scale_0', 'scale_1', 'scale_2'),
        'rotations': stack('rot_0', 'rot_1', 'rot_2', 'rot_3'),
        'dc': stack('f_dc_0', 'f_dc_1', 'f_dc_2'),
        'sh_rest': sh_rest,
        'plydata': plydata,  # original handle, reused when saving
    }
44
+
45
+
46
def quaternion_to_rotation_matrix(q):
    """Convert a [w, x, y, z] quaternion to a 3x3 rotation matrix.

    3DGS .ply files store rot_0..rot_3 in scalar-first (wxyz) order,
    while scipy's ``Rotation.from_quat`` expects scalar-last (xyzw), so
    the components are reordered here.  The previous try/except "format
    probe" never worked: ``from_quat`` only raises for a zero-norm
    quaternion, so the fallback branch was dead code.

    The quaternion is normalized first; a (near-)zero quaternion falls
    back to the identity rotation instead of raising.
    """
    q = np.asarray(q, dtype=float)
    norm = np.linalg.norm(q)
    if norm < 1e-12:
        # Degenerate quaternion: no meaningful orientation.
        return np.eye(3)
    w, x, y, z = q / norm
    return R.from_quat([x, y, z, w]).as_matrix()
56
+
57
+
58
def compute_covariance(rotation, scale):
    """Build the 3x3 covariance of one Gaussian: sigma = R * S * S^T * R^T.

    `rotation` is a quaternion (decoded by quaternion_to_rotation_matrix)
    and `scale` holds the three per-axis scale factors.
    """
    rot_mat = quaternion_to_rotation_matrix(rotation)
    scale_mat = np.diag(scale)
    return rot_mat @ scale_mat @ scale_mat.T @ rot_mat.T
66
+
67
+
68
def covariance_to_rotation_scale(cov):
    """Decompose a covariance matrix into a quaternion and per-axis scales.

    Uses the eigendecomposition sigma = V * Lambda * V^T: the eigenvector
    matrix V is the rotation, sqrt(Lambda) the scales.

    Returns:
        rotation: quaternion in [w, x, y, z] order — reordered from
            scipy's scalar-last output so it matches the rot_0..rot_3
            storage convention used when reading/writing the .ply
            (previously the raw xyzw quaternion was written out,
            inconsistent with the wxyz layout of 3DGS files).
        scale: sqrt of the clamped eigenvalues, ascending order.
    """
    eigenvalues, eigenvectors = np.linalg.eigh(cov)

    # Numerical noise can yield tiny negative eigenvalues; clamp them.
    eigenvalues = np.maximum(eigenvalues, 1e-7)

    # Scales are the standard deviations along the principal axes.
    scale = np.sqrt(eigenvalues)

    # eigh may return a reflection; flip one axis to get a proper rotation.
    if np.linalg.det(eigenvectors) < 0:
        eigenvectors[:, 0] *= -1

    # scipy returns scalar-last (xyzw); reorder to the wxyz convention.
    x, y, z, w = R.from_matrix(eigenvectors).as_quat()
    rotation = np.array([w, x, y, z])

    return rotation, scale
92
+
93
+
94
def dc_to_rgb(dc):
    """Map 0th-order SH (DC) coefficients to RGB, clipped to [0, 1]."""
    SH_C0 = 0.28209479177387814  # Y_0^0 spherical-harmonic basis constant
    return np.clip(dc * SH_C0 + 0.5, 0, 1)
99
+
100
+
101
def rgb_to_dc(rgb):
    """Inverse of dc_to_rgb: map RGB back to 0th-order SH coefficients."""
    SH_C0 = 0.28209479177387814  # Y_0^0 spherical-harmonic basis constant
    return (rgb - 0.5) / SH_C0
106
+
107
+
108
def build_octree(positions, max_points=5000):
    """Partition points into octree leaf cells of at most `max_points`.

    Args:
        positions: (N, 3) array of point coordinates.
        max_points: leaf capacity before a cell is subdivided.

    Returns:
        list of leaf dicts with 'indices' (into `positions`),
        'bbox_min' and 'bbox_max'.

    Every input point ends up in exactly one leaf: points are assigned
    to octants by comparing against the cell center (>= center goes to
    the upper half).  The previous mask `pos >= sub_min & pos < sub_max`
    silently dropped any point lying exactly on a cell's max face — in
    particular the points that define the global bounding-box maximum
    never landed in any leaf.  Recursion stops at depth 10 even if a
    cell is still over-full (e.g. many coincident points).
    """
    cells = []

    def subdivide(indices, bbox_min, bbox_max, depth=0):
        if len(indices) <= max_points or depth > 10:  # depth cap guards coincident points
            cells.append({
                'indices': indices,
                'bbox_min': bbox_min,
                'bbox_max': bbox_max
            })
            return

        center = (bbox_min + bbox_max) / 2

        # Octant id per point: bit k is set when coordinate k >= center.
        pts = positions[indices]
        octant = ((pts[:, 0] >= center[0]).astype(int)
                  + ((pts[:, 1] >= center[1]).astype(int) << 1)
                  + ((pts[:, 2] >= center[2]).astype(int) << 2))

        for i in range(8):
            sub_indices = indices[octant == i]
            if len(sub_indices) == 0:
                continue

            x_flag = i & 1
            y_flag = (i >> 1) & 1
            z_flag = (i >> 2) & 1

            sub_min = np.array([
                center[0] if x_flag else bbox_min[0],
                center[1] if y_flag else bbox_min[1],
                center[2] if z_flag else bbox_min[2]
            ])
            sub_max = np.array([
                bbox_max[0] if x_flag else center[0],
                bbox_max[1] if y_flag else center[1],
                bbox_max[2] if z_flag else center[2]
            ])

            subdivide(sub_indices, sub_min, sub_max, depth + 1)

    # Root cell covers the whole point cloud.
    bbox_min = positions.min(axis=0)
    bbox_max = positions.max(axis=0)
    subdivide(np.arange(len(positions)), bbox_min, bbox_max)

    return cells
157
+
158
+
159
def cluster_and_merge_cell(data, cell_indices, bbox_min, bbox_max):
    """Cluster the Gaussians inside one octree cell and merge each cluster.

    Args:
        data: dict produced by read_ply ('positions', 'opacities',
            'scales', 'rotations', 'dc', 'sh_rest').
        cell_indices: indices (into the arrays in `data`) of the points
            belonging to this cell.
        bbox_min, bbox_max: the cell's axis-aligned bounding box.

    Returns:
        dict of merged arrays (roughly one merged Gaussian per 4 inputs),
        or None when the cell holds fewer than 4 points.

    NOTE(review): scales and opacities are treated as linear values here;
    3DGS .ply exports typically store log-scales and logit opacities —
    confirm the input file has activations applied before trusting the
    volume/opacity math below.
    """
    if len(cell_indices) < 4:
        return None  # too few points to be worth merging

    # Target cluster count: roughly a 4:1 reduction per cell.
    n_clusters = max(1, len(cell_indices) // 4)

    # Slice this cell's attributes out of the global arrays.
    positions = data['positions'][cell_indices]
    dc = data['dc'][cell_indices]
    opacities = data['opacities'][cell_indices]
    scales = data['scales'][cell_indices]
    rotations = data['rotations'][cell_indices]

    # Cell extent, clamped to avoid division by zero on flat cells.
    cell_size = bbox_max - bbox_min
    cell_size = np.maximum(cell_size, 1e-6)

    # Normalize positions into [0, 1] within the cell.
    norm_positions = (positions - bbox_min) / cell_size

    # Convert DC SH coefficients to RGB in [0, 1].
    rgb = dc_to_rgb(dc)

    # Clustering features: position weighted 0.8, color weighted 0.2.
    # Square roots are applied because Euclidean distance squares them.
    features = np.concatenate([
        norm_positions * np.sqrt(0.8),
        rgb * np.sqrt(0.2)
    ], axis=1)

    # Hierarchical (Ward) clustering on the combined features.
    clustering = AgglomerativeClustering(
        n_clusters=n_clusters,
        linkage='ward'
    )
    labels = clustering.fit_predict(features)

    # Accumulators for this cell's merged Gaussians.
    merged_data = {
        'positions': [],
        'opacities': [],
        'scales': [],
        'rotations': [],
        'dc': [],
        'sh_rest': [] if data['sh_rest'] is not None else None
    }

    for cluster_id in range(n_clusters):
        cluster_mask = labels == cluster_id
        cluster_indices = np.where(cluster_mask)[0]

        if len(cluster_indices) == 0:
            continue

        # Merge weight per Gaussian: opacity * scale volume.
        # NOTE(review): if opacities can be <= 0 (logit-encoded), the
        # weight sum may be zero/negative and the division below breaks
        # — verify the upstream encoding.
        volumes = scales[cluster_indices].prod(axis=1, keepdims=True)
        weights = opacities[cluster_indices] * volumes
        weights_sum = weights.sum()
        normalized_weights = weights / weights_sum

        # Weighted mean position.
        merged_position = (positions[cluster_indices] * normalized_weights).sum(axis=0)

        # Weighted mean DC color coefficients.
        merged_dc = (dc[cluster_indices] * normalized_weights).sum(axis=0)

        # Weighted mean of higher-order SH coefficients, when present.
        if data['sh_rest'] is not None:
            sh_rest_cell = data['sh_rest'][cell_indices]
            merged_sh_rest = (sh_rest_cell[cluster_indices] * normalized_weights).sum(axis=0)

        # Per-Gaussian covariances: sigma_i = R * S * S^T * R^T.
        covariances = []
        for idx in cluster_indices:
            cov = compute_covariance(rotations[idx], scales[idx])
            covariances.append(cov)
        covariances = np.array(covariances)

        # Moment-matched mixture covariance (weights already normalized):
        # sigma_new = sum_i w_i * (sigma_i + (mu_i - mu_new)(mu_i - mu_new)^T)
        merged_cov = np.zeros((3, 3))
        for i, idx in enumerate(cluster_indices):
            diff = positions[idx] - merged_position
            outer = np.outer(diff, diff)
            merged_cov += normalized_weights[i, 0] * (covariances[i] + outer)

        # Factor the covariance back into rotation + per-axis scales.
        merged_rotation, merged_scale = covariance_to_rotation_scale(merged_cov)

        # Mass conservation: opacity_new * volume_new = sum(opacity_i * volume_i);
        # fall back to the mean opacity when the merged volume is degenerate.
        merged_volume = merged_scale.prod()
        merged_opacity = weights_sum / merged_volume if merged_volume > 1e-10 else opacities[cluster_indices].mean()
        merged_opacity = np.clip(merged_opacity, 0, 1)

        # Collect this cluster's merged Gaussian.
        merged_data['positions'].append(merged_position)
        merged_data['opacities'].append(merged_opacity)
        merged_data['scales'].append(merged_scale)
        merged_data['rotations'].append(merged_rotation)
        merged_data['dc'].append(merged_dc)
        if data['sh_rest'] is not None:
            merged_data['sh_rest'].append(merged_sh_rest)

    # Convert the accumulator lists to arrays (skip None and empty lists).
    for key in merged_data:
        if merged_data[key] is not None and len(merged_data[key]) > 0:
            merged_data[key] = np.array(merged_data[key])

    return merged_data
269
+
270
+
271
def merge_gaussians(ply_path, output_path):
    """Top-level pipeline: load, partition, cluster-merge, and save.

    Reads the Gaussians from `ply_path`, splits them into octree cells,
    merges the clusters inside every cell, then writes the reduced set
    to `output_path`.
    """
    print("读取PLY文件...")
    data = read_ply(ply_path)
    n_original = len(data['positions'])
    print(f"原始高斯点数: {n_original}")

    print("构建八叉树...")
    cells = build_octree(data['positions'], max_points=5000)
    print(f"划分为 {len(cells)} 个cells")

    print("对每个cell进行聚类和合并...")
    # One accumulator list per attribute; sh_rest stays None when absent.
    all_merged_data = {
        key: [] for key in
        ('positions', 'opacities', 'scales', 'rotations', 'dc', 'sh_rest')
    }
    if data['sh_rest'] is None:
        all_merged_data['sh_rest'] = None

    for i, cell in enumerate(cells):
        if i % 100 == 0:
            print(f"处理进度: {i}/{len(cells)}")

        merged = cluster_and_merge_cell(
            data,
            cell['indices'],
            cell['bbox_min'],
            cell['bbox_max']
        )
        if merged is None:
            continue
        for key, bucket in all_merged_data.items():
            if bucket is not None and len(merged[key]) > 0:
                bucket.append(merged[key])

    print("合并所有cell的结果...")
    # Concatenate the per-cell chunks into one array per attribute.
    final_data = {
        key: np.concatenate(chunks, axis=0)
        for key, chunks in all_merged_data.items()
        if chunks is not None and len(chunks) > 0
    }

    n_merged = len(final_data['positions'])
    print(f"合并后高斯点数: {n_merged}")
    print(f"压缩率: {n_merged/n_original*100:.2f}%")

    print("保存PLY文件...")
    save_ply(final_data, data['plydata'], output_path)
    print(f"已保存到: {output_path}")
323
+
324
+
325
def save_ply(merged_data, original_plydata, output_path):
    """Write the merged Gaussians to `output_path` as a 3DGS-style .ply.

    `original_plydata` is currently unused; it stays in the signature so
    callers can keep passing the source file for future header reuse.
    All properties are written as float32.
    """
    n_points = len(merged_data['positions'])
    sh_rest = merged_data['sh_rest']

    # Property layout: base attributes, then optional f_rest_* columns.
    field_names = [
        'x', 'y', 'z',
        'opacity',
        'scale_0', 'scale_1', 'scale_2',
        'rot_0', 'rot_1', 'rot_2', 'rot_3',
        'f_dc_0', 'f_dc_1', 'f_dc_2',
    ]
    if sh_rest is not None:
        field_names += [f'f_rest_{i}' for i in range(sh_rest.shape[1])]

    vertex_data = np.empty(n_points, dtype=[(name, 'f4') for name in field_names])

    # Fill the structured array column by column.
    vertex_data['x'], vertex_data['y'], vertex_data['z'] = merged_data['positions'].T
    vertex_data['opacity'] = merged_data['opacities'].flatten()
    for axis in range(3):
        vertex_data[f'scale_{axis}'] = merged_data['scales'][:, axis]
        vertex_data[f'f_dc_{axis}'] = merged_data['dc'][:, axis]
    for axis in range(4):
        vertex_data[f'rot_{axis}'] = merged_data['rotations'][:, axis]
    if sh_rest is not None:
        for i in range(sh_rest.shape[1]):
            vertex_data[f'f_rest_{i}'] = sh_rest[:, i]

    # Wrap in a PLY element and write to disk.
    PlyData([PlyElement.describe(vertex_data, 'vertex')]).write(output_path)
371
+
372
+
373
# Example usage
if __name__ == "__main__":
    input_ply = "input.ply"            # path of the source .ply file
    output_ply = "output_merged.ply"   # path for the merged result

    merge_gaussians(input_ply, output_ply)