SlekLi commited on
Commit
5b9b1ed
ยท
verified ยท
1 Parent(s): 16c1081

Upload quantize.py

Browse files
Files changed (1) hide show
  1. quantize.py +314 -0
quantize.py ADDED
@@ -0,0 +1,314 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ 3DGS Codebook Quantizer
3
+ ========================
4
+ ไฝฟ็”จๅทฒ่ฎญ็ปƒๅฅฝ็š„ codebook ๅฏนๆ–ฐ็š„ 3DGS .ply ๆ–‡ไปถ่ฟ›่กŒ้‡ๅŒ–๏ผŒ
5
+ ๅฐ†่ฟž็ปญ็‰นๅพๆ˜ ๅฐ„ไธบ็ฆปๆ•ฃ็ดขๅผ•๏ผŒๅนถๅฏ้€‰ๅœฐ้‡ๅปบ้‡ๅŒ–ๅŽ็š„็‰นๅพๅ†™ๅ›ž .plyใ€‚
6
+
7
+ ่พ“ๅ‡บ๏ผš
8
+ <scene>_quantized.npz โ€”โ€” ๅ››็ฑป็ดขๅผ• + ้‡ๅปบ่ฏฏๅทฎ็ปŸ่ฎก
9
+ <scene>_quantized.ply โ€”โ€” ๏ผˆๅฏ้€‰๏ผ‰็”จ codebook ้‡ๅปบ็‰นๅพๅŽๅ†™ๅ›ž็š„ๆ–ฐ .ply
10
+ """
11
+
12
+ import os
13
+ import argparse
14
+ import numpy as np
15
+ from plyfile import PlyData, PlyElement
16
+ import time
17
+
18
+
19
+ # โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
20
+ # 1. PLY ่ฏปๅ–๏ผˆๅค็”จ่ฎญ็ปƒๆ—ถ็š„่ฏปๆณ•๏ผ‰
21
+ # โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
22
+
23
def read_ply(ply_path: str) -> dict:
    """Load a 3DGS .ply file and split the vertex element into attribute arrays.

    Returns a dict with positions (N,3), opacities (N,1), scales (N,3),
    rotations (N,4), dc (N,3), sh_rest (N,len(f_rest_*)) or None, filter_3d
    (N,1) or None, plus the raw plydata object and the sorted SH key names
    (needed later to write a reconstructed .ply back out).
    """
    plydata = PlyData.read(ply_path)
    vertex = plydata['vertex']

    def columns(*names):
        # Stack the named vertex properties into an (N, len(names)) array.
        return np.stack([vertex[n] for n in names], axis=1)

    positions = columns('x', 'y', 'z')
    opacities = vertex['opacity'][:, np.newaxis]
    scales = columns('scale_0', 'scale_1', 'scale_2')
    rotations = columns('rot_0', 'rot_1', 'rot_2', 'rot_3')
    dc = columns('f_dc_0', 'f_dc_1', 'f_dc_2')

    field_names = vertex.data.dtype.names
    # Sort f_rest_* numerically (f_rest_2 before f_rest_10), not lexically.
    sh_keys = sorted(
        (k for k in field_names if k.startswith('f_rest_')),
        key=lambda s: int(s.split('_')[-1]),
    )
    sh_rest = columns(*sh_keys) if sh_keys else None

    filter_3d = (
        vertex['filter_3D'][:, np.newaxis]
        if 'filter_3D' in field_names else None
    )

    print(f"[read_ply] {os.path.basename(ply_path)}๏ผš{positions.shape[0]} ไธช้ซ˜ๆ–ฏ็‚น")
    return {
        'positions': positions,
        'opacities': opacities,
        'scales': scales,
        'rotations': rotations,
        'dc': dc,
        'sh_rest': sh_rest,
        'filter_3d': filter_3d,
        'plydata': plydata,
        'sh_keys': sh_keys,
    }
59
+
60
+
61
+ # โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
62
+ # 2. ๅŠ ่ฝฝ codebook
63
+ # โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
64
+
65
def load_codebook(codebook_dir: str, name: str):
    """Load one trained codebook from <codebook_dir>/<name>_codebook.npz.

    Returns the (K, D) float32 codebook matrix. Any 'indices' stored in the
    archive belong to the training set and are deliberately ignored here.

    Raises:
        FileNotFoundError: if the .npz file does not exist.
    """
    path = os.path.join(codebook_dir, f"{name}_codebook.npz")
    if not os.path.exists(path):
        raise FileNotFoundError(f"ๆ‰พไธๅˆฐ codebook ๆ–‡ไปถ๏ผš{path}")

    archive = np.load(path)
    codebook = archive['codebook'].astype(np.float32)  # (K, D)
    K, D = codebook.shape
    print(f"[load] {name}_codebook๏ผšK={K}, D={D}")
    return codebook
77
+
78
+
79
+ # โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
80
+ # 3. ๆœ€่ฟ‘้‚ป้‡ๅŒ–๏ผˆๆ ธๅฟƒ๏ผ‰
81
+ # โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
82
+
83
def quantize(features: np.ndarray, codebook: np.ndarray, name: str,
             batch_size: int = 65536):
    """Nearest-neighbour quantization of features (N, D) against codebook (K, D).

    Processes the points in batches so the full (N, K) distance matrix is
    never materialised at once.

    Args:
        features:   (N, D) array to quantize (cast to float32 internally).
        codebook:   (K, D) cluster centers.
        name:       label used in the progress printout.
        batch_size: number of points per nearest-neighbour batch.

    Returns:
        indices       : (N,) int32, codebook row assigned to each point
        reconstructed : (N, D), codebook[indices]
        stats         : {'rmse', 'max_err', 'cluster_usage', 'K'}
                        ('K' is included so callers can report usage/K)
    """
    features = features.astype(np.float32)
    N, D = features.shape
    K = codebook.shape[0]
    indices = np.empty(N, dtype=np.int32)

    # โ”€โ”€ batched nearest neighbour โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
    # Expanded L2 distance: ||x - c||^2 = ||x||^2 + ||c||^2 - 2 * x @ c^T
    cb_norm2 = np.sum(codebook ** 2, axis=1)  # (K,)

    t0 = time.time()
    for start in range(0, N, batch_size):
        end = min(start + batch_size, N)
        feat = features[start:end]  # (B, D)

        feat_norm2 = np.sum(feat ** 2, axis=1, keepdims=True)  # (B, 1)
        # (B, K) squared-distance matrix
        dist2 = feat_norm2 + cb_norm2[np.newaxis, :] \
            - 2.0 * (feat @ codebook.T)
        indices[start:end] = np.argmin(dist2, axis=1)

    elapsed = time.time() - t0
    reconstructed = codebook[indices]  # (N, D)

    # โ”€โ”€ error statistics โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
    # Guard N == 0: np.mean would warn/NaN and .max() would raise ValueError.
    if N > 0:
        diff = features - reconstructed
        rmse = float(np.sqrt(np.mean(diff ** 2)))
        max_e = float(np.abs(diff).max())
    else:
        rmse = 0.0
        max_e = 0.0
    usage = len(np.unique(indices))  # how many clusters are actually used

    print(f"[{name:8s}] ้‡ๅŒ–ๅฎŒๆˆ {elapsed:.1f}s | "
          f"RMSE={rmse:.6f} MaxErr={max_e:.6f} "
          f"ไฝฟ็”จ {usage}/{K} ไธช cluster "
          f"({100*usage/K:.1f}%)")

    return indices, reconstructed, {'rmse': rmse, 'max_err': max_e,
                                    'cluster_usage': usage, 'K': K}
129
+
130
+
131
+ # โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
132
+ # 4. ้‡ๅŒ–ๅ…จ้ƒจ็‰นๅพ
133
+ # โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
134
+
135
def quantize_all(data: dict, codebook_dir: str):
    """Quantize all four feature groups of a scene against their codebooks.

    Loads one codebook per group (scale / rotation / dc / sh) from
    codebook_dir and runs nearest-neighbour quantization on each.

    Returns:
        results   : {name: {'indices', 'reconstructed', 'stats'}}
        codebooks : {name: (K, D) np.ndarray}

    Raises:
        ValueError: if the scene has no f_rest_* fields (sh_rest is None).
    """
    if data['sh_rest'] is None:
        raise ValueError("PLY ไธญๆ—  f_rest_* ๅญ—ๆฎต๏ผŒๆ— ๆณ•้‡ๅŒ– SHใ€‚")

    feature_groups = (
        ('scale', data['scales']),
        ('rotation', data['rotations']),
        ('dc', data['dc']),
        ('sh', data['sh_rest']),
    )

    results = {}
    codebooks = {}
    banner = '=' * 55
    for name, features in feature_groups:
        print(f"\n{banner}")
        print(f" ้‡ๅŒ– [{name}] ็‰นๅพ็ปดๅบฆ: {features.shape[1]}")
        print(banner)

        codebook = load_codebook(codebook_dir, name)
        codebooks[name] = codebook

        indices, reconstructed, stats = quantize(features, codebook, name)
        results[name] = {
            'indices': indices,
            'reconstructed': reconstructed,
            'stats': stats,
        }

    return results, codebooks
171
+
172
+
173
+ # โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
174
+ # 5. ไฟๅญ˜้‡ๅŒ–็ป“ๆžœ๏ผˆ็ดขๅผ• + ็ปŸ่ฎก๏ผ‰
175
+ # โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
176
+
177
def save_quantized(save_path: str, data: dict, results: dict) -> None:
    """Save the four index arrays plus per-point metadata to a single .npz.

    Archive contents:
        positions        (N, 3) float32  original coordinates (for alignment)
        opacities        (N, 1) float32
        scale_indices    (N,)   int32
        rotation_indices (N,)   int32
        dc_indices       (N,)   int32
        sh_indices       (N,)   int32
    """
    payload = {
        'positions': data['positions'].astype(np.float32),
        'opacities': data['opacities'].astype(np.float32),
    }
    for feature in ('scale', 'rotation', 'dc', 'sh'):
        payload[f'{feature}_indices'] = results[feature]['indices']

    np.savez_compressed(save_path, **payload)
    size_mb = os.path.getsize(save_path) / 1024 / 1024
    print(f"\n[ไฟๅญ˜] ้‡ๅŒ–็ดขๅผ• โ†’ {save_path} ({size_mb:.2f} MB)")
200
+
201
+
202
+ # โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
203
+ # 6. ๏ผˆๅฏ้€‰๏ผ‰ๅ†™ๅ›ž้‡ๅŒ–้‡ๅปบ็š„ .ply
204
+ # โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
205
+
206
def save_reconstructed_ply(
    save_path: str,
    data: dict,
    results: dict,
) -> None:
    """Write a new .ply with the original features replaced by their
    codebook reconstructions.

    positions and opacities are left untouched (they were not quantized).
    """
    plydata = data['plydata']
    # vertex.data is a structured numpy array; copy it and assign per field.
    arr = plydata['vertex'].data.copy()

    # Structured-array field names for each reconstructed feature, in
    # column order: scale (N,3), rotation (N,4), dc (N,3), sh (N, len(sh_keys)).
    field_layout = {
        'scale': ('scale_0', 'scale_1', 'scale_2'),
        'rotation': ('rot_0', 'rot_1', 'rot_2', 'rot_3'),
        'dc': ('f_dc_0', 'f_dc_1', 'f_dc_2'),
        'sh': tuple(data['sh_keys']),
    }
    for feature, fields in field_layout.items():
        reconstructed = results[feature]['reconstructed']
        for col, field in enumerate(fields):
            arr[field] = reconstructed[:, col]

    # โ”€โ”€ write the new ply โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
    new_vertex = PlyElement.describe(arr, 'vertex')
    PlyData([new_vertex], text=plydata.text).write(save_path)

    size_mb = os.path.getsize(save_path) / 1024 / 1024
    print(f"[ไฟๅญ˜] ้‡ๅปบ .ply โ†’ {save_path} ({size_mb:.2f} MB)")
252
+
253
+
254
+ # โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
255
+ # 7. ๆ‰“ๅฐๆฑ‡ๆ€ป็ปŸ่ฎก
256
+ # โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
257
+
258
def print_summary(results: dict, codebooks: dict = None) -> None:
    """Print a per-feature summary table of quantization error and codebook usage.

    Args:
        results:   {name: {'stats': {...}, ...}} as returned by quantize_all.
        codebooks: optional {name: (K, D) np.ndarray}. When the codebook size
            K is known (from this mapping or from a 'K' entry in stats), usage
            is reported as "used / K (pct%)"; otherwise only the used count is
            shown.

    Bug fixed: the previous version computed
    100*cluster_usage/cluster_usage (always 100%) and printed
    len(np.unique(indices)) as the denominator, which is cluster_usage
    itself — the table never showed the real usage ratio.
    """
    sep = '=' * 55
    print(f"\n{sep}")
    print(" ้‡ๅŒ–ๆฑ‡ๆ€ป")
    print(sep)
    print(f" {'็‰นๅพ':<10} {'RMSE':>10} {'MaxErr':>10} {'Clusterไฝฟ็”จ็އ':>14}")
    print(f" {'-'*46}")
    for name, res in results.items():
        s = res['stats']
        usage = s['cluster_usage']
        # Prefer K stored in the stats, then fall back to the codebook itself.
        K = s.get('K')
        if K is None and codebooks is not None and name in codebooks:
            K = codebooks[name].shape[0]
        line = f" {name:<10} {s['rmse']:>10.6f} {s['max_err']:>10.6f} "
        if K:
            line += f" {usage:>5} / {K:>5} ({100 * usage / K:.0f}%)"
        else:
            line += f" {usage:>5} used"
        print(line)
    print(sep)
270
+
271
+
272
+ # โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
273
+ # 8. CLI ๅ…ฅๅฃ
274
+ # โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
275
+
276
def parse_args():
    """Build the CLI parser and parse sys.argv for the quantization script.

    Positional: ply_path. Options: --codebook_dir, --save_dir, --save_ply.
    """
    ap = argparse.ArgumentParser(
        description="็”จๅทฒ่ฎญ็ปƒ็š„ codebook ้‡ๅŒ–ๆ–ฐ็š„ 3DGS .ply ๆ–‡ไปถ")
    ap.add_argument('ply_path', type=str,
                    help='ๅพ…้‡ๅŒ–็š„ 3DGS .ply ๆ–‡ไปถ่ทฏๅพ„')
    ap.add_argument('--codebook_dir', type=str, default='./codebooks',
                    help='ๅญ˜ๆ”พๅ››ไธช *_codebook.npz ็š„็›ฎๅฝ•๏ผˆ้ป˜่ฎค๏ผš./codebooks๏ผ‰')
    ap.add_argument('--save_dir', type=str, default='./quantized',
                    help='้‡ๅŒ–็ป“ๆžœ่พ“ๅ‡บ็›ฎๅฝ•๏ผˆ้ป˜่ฎค๏ผš./quantized๏ผ‰')
    ap.add_argument('--save_ply', action='store_true',
                    help='ๅŒๆ—ถ่พ“ๅ‡บ็”จ codebook ้‡ๅปบ็‰นๅพๅŽ็š„ .ply ๆ–‡ไปถ')
    return ap.parse_args()
289
+
290
+
291
if __name__ == '__main__':
    # CLI entry point: read a scene, quantize it against trained codebooks,
    # then write the index archive (and optionally a reconstructed .ply).
    args = parse_args()
    os.makedirs(args.save_dir, exist_ok=True)

    # -- read the new scene --------------------------------------
    data = read_ply(args.ply_path)

    # -- quantize all four feature groups ------------------------
    results, codebooks = quantize_all(data, args.codebook_dir)

    # -- print the summary table ---------------------------------
    print_summary(results)

    # -- save the quantization indices ---------------------------
    # Output files are named after the input scene (basename sans extension).
    scene_name = os.path.splitext(os.path.basename(args.ply_path))[0]
    npz_path = os.path.join(args.save_dir, f"{scene_name}_quantized.npz")
    save_quantized(npz_path, data, results)

    # -- (optional) write back a reconstructed ply ---------------
    if args.save_ply:
        ply_out = os.path.join(args.save_dir, f"{scene_name}_reconstructed.ply")
        save_reconstructed_ply(ply_out, data, results)

    print("\nๅ…จ้ƒจๅฎŒๆˆ๏ผ")