bdck commited on
Commit
9051cce
·
verified ·
1 Parent(s): 0946eea

Upload io_utils.py

Browse files
Files changed (1) hide show
  1. io_utils.py +190 -0
io_utils.py ADDED
@@ -0,0 +1,190 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Pure-Python point cloud I/O and mesh exporters.
3
+ No Open3D or trimesh required.
4
+ """
5
+ import struct
6
+ import numpy as np
7
+
8
+
9
+ def _load_ply_ascii(lines, header_end):
10
+ props = []
11
+ for line in lines[:header_end + 1]:
12
+ if line.startswith('property'):
13
+ props.append(line.strip().split()[-1])
14
+ data = []
15
+ for line in lines[header_end + 1:]:
16
+ if not line.strip():
17
+ continue
18
+ parts = line.strip().split()
19
+ if len(parts) < len(props):
20
+ break
21
+ data.append([float(p) for p in parts])
22
+ data = np.array(data, dtype=np.float32)
23
+ if 'x' in props and 'y' in props and 'z' in props:
24
+ xyz = data[:, [props.index(c) for c in ['x', 'y', 'z']]]
25
+ return xyz
26
+ return data[:, :3]
27
+
28
+
29
+ def _load_ply_binary(lines, header_end, data_bytes, is_little_endian=True):
30
+ order = '<' if is_little_endian else '>'
31
+ props = []
32
+ for line in lines[:header_end + 1]:
33
+ if line.startswith('property'):
34
+ parts = line.strip().split()
35
+ dtype_map = {'float': ('f', 4), 'double': ('d', 8),
36
+ 'uchar': ('B', 1), 'char': ('b', 1),
37
+ 'ushort': ('H', 2), 'short': ('h', 2),
38
+ 'uint': ('I', 4), 'int': ('i', 4)}
39
+ props.append(dtype_map.get(parts[1], ('f', 4)))
40
+ n = len(props)
41
+ fmt = order + ''.join([p[0] for p in props])
42
+ size = struct.calcsize(fmt)
43
+ n_verts = len(data_bytes) // size
44
+ verts = []
45
+ for i in range(n_verts):
46
+ row = struct.unpack_from(fmt, data_bytes, i * size)
47
+ verts.append(row[:3])
48
+ return np.array(verts, dtype=np.float32)
49
+
50
+
51
def load_pointcloud(path):
    """Load a point cloud from a .ply, .xyz/.txt, or ASCII .pcd file.

    Parameters
    ----------
    path : str
        File path; the format is chosen by extension (case-insensitive).

    Returns
    -------
    np.ndarray
        (N, 3) array of XYZ coordinates (float32 except for the text loader's
        exact dtype, which is also float32).

    Raises
    ------
    ValueError
        On an unknown extension, a malformed PLY header, an unsupported PLY
        format, or a binary PCD file.
    """
    lower = path.lower()
    if lower.endswith('.ply'):
        with open(path, 'rb') as f:
            header = []
            while True:
                line = f.readline().decode('ascii').strip()
                header.append(line)
                if line == 'end_header':
                    break
            data = f.read()
        fmt_line = [l for l in header if l.startswith('format')]
        if not fmt_line:
            raise ValueError("Invalid PLY file")
        fmt = fmt_line[0].split()[1]
        if fmt == 'ascii':
            # Re-read as text for ASCII
            with open(path, 'r') as f:
                lines = f.readlines()
            header_end = next((i for i, l in enumerate(lines)
                               if l.strip() == 'end_header'), len(lines))
            return _load_ply_ascii(lines, header_end)
        if fmt in ('binary_little_endian', 'binary_big_endian'):
            header_end = next((i for i, l in enumerate(header)
                               if l == 'end_header'), len(header))
            return _load_ply_binary(header, header_end, data,
                                    fmt == 'binary_little_endian')
        raise ValueError(f"Unsupported PLY format: {fmt}")
    elif lower.endswith('.xyz') or lower.endswith('.txt'):
        # ndmin=2 keeps a single-row file from collapsing to a 1-D array,
        # which would make the [:, :3] slice raise IndexError.
        return np.loadtxt(path, dtype=np.float32, ndmin=2)[:, :3]
    elif lower.endswith('.pcd'):
        with open(path, 'r') as f:
            lines = f.readlines()
        in_header = True
        data = []
        for line in lines:
            if in_header:
                if line.startswith('DATA'):
                    # Only ASCII payloads can be parsed by this text reader.
                    if 'ascii' not in line.lower():
                        raise ValueError(f"Unsupported PCD data encoding: {line.strip()}")
                    in_header = False
                continue
            parts = line.strip().split()
            if len(parts) >= 3:
                # Assumes the first three FIELDS are x y z (the common PCD
                # layout) — TODO confirm against producers of these files.
                data.append([float(parts[0]), float(parts[1]), float(parts[2])])
        return np.array(data, dtype=np.float32)
    else:
        raise ValueError(f"Unsupported file format: {path}")
98
+
99
+
100
def normalize_pointcloud(points):
    """Center and scale to unit cube [-0.5, 0.5]^3.

    Returns (normalized_points, center, scale) so the transform can be
    inverted with denormalize_pointcloud.
    """
    lo = points.min(axis=0)
    hi = points.max(axis=0)
    mid = 0.5 * (lo + hi)
    extent = (hi - lo).max()
    if extent == 0:
        extent = 1.0  # degenerate (single-point) cloud: avoid division by zero
    return (points - mid) / extent, mid, extent
109
+
110
+
111
def denormalize_pointcloud(points, center, scale):
    """Invert normalize_pointcloud: map unit-cube coordinates back to world space."""
    return center + scale * points
114
+
115
+
116
def save_mesh_obj(path, vertices, faces):
    """Write a triangle mesh as Wavefront OBJ (face indices are 1-based)."""
    with open(path, 'w') as out:
        out.writelines(f"v {p[0]:.6f} {p[1]:.6f} {p[2]:.6f}\n" for p in vertices)
        out.writelines(f"f {t[0]+1} {t[1]+1} {t[2]+1}\n" for t in faces)
123
+
124
+
125
def save_mesh_ply(path, vertices, faces):
    """Write a triangle mesh as an ASCII PLY 1.0 file (0-based face indices)."""
    header_lines = [
        "ply",
        "format ascii 1.0",
        f"element vertex {len(vertices)}",
        "property float x",
        "property float y",
        "property float z",
        f"element face {len(faces)}",
        "property list uchar int vertex_indices",
        "end_header",
    ]
    with open(path, 'w') as out:
        out.write("\n".join(header_lines) + "\n")
        for p in vertices:
            out.write(f"{p[0]:.6f} {p[1]:.6f} {p[2]:.6f}\n")
        for t in faces:
            out.write(f"3 {t[0]} {t[1]} {t[2]}\n")
145
+
146
+
147
def estimate_normals(points, k=16):
    """Estimate per-point unit normals via local PCA.

    Each normal is the eigenvector of the k-neighborhood covariance with the
    smallest eigenvalue; orientation is flipped to point away from the global
    centroid (outward heuristic — only valid for roughly star-shaped clouds).

    Parameters
    ----------
    points : np.ndarray
        (N, 3) point positions.
    k : int
        Neighborhood size, clamped to N.

    Returns
    -------
    np.ndarray
        (N, 3) unit normals. Clouds with fewer than 3 points get a +z
        fallback (too few neighbors for a meaningful covariance — the old
        code crashed on np.cov of a single sample).
    """
    from scipy.spatial import KDTree
    n = len(points)
    k_eff = min(k, n)
    if k_eff < 3:
        fallback = np.zeros_like(points)
        fallback[:, 2] = 1.0
        return fallback
    tree = KDTree(points)
    _, idx = tree.query(points, k=k_eff)
    normals = np.zeros_like(points)
    for i in range(n):
        neighbors = points[idx[i]]
        cov = np.cov((neighbors - points[i]).T)
        # eigh returns eigenvalues in ascending order; argmin kept for clarity.
        eigvals, eigvecs = np.linalg.eigh(cov)
        normals[i] = eigvecs[:, np.argmin(eigvals)]
    # Consistent orientation (outward via centroid heuristic), vectorized.
    centroid = points.mean(axis=0)
    inward = np.einsum('ij,ij->i', normals, points - centroid) < 0
    normals[inward] *= -1
    return normals
165
+
166
+
167
def fps_sample(points, npoint, start=None):
    """Farthest point sampling, pure numpy.

    Iteratively selects the point farthest (in min-distance to the selected
    set) from what has been picked so far.

    Parameters
    ----------
    points : np.ndarray
        (N, 3) point positions.
    npoint : int
        Number of samples; clamped to N.
    start : int, optional
        Index of the seed point. Defaults to a random index (the original
        behavior); pass an int for deterministic output.

    Returns
    -------
    np.ndarray
        (npoint,) int64 array of selected indices (empty for an empty cloud,
        which previously crashed in np.random.randint(0, 0)).
    """
    n = len(points)
    if n == 0:
        return np.zeros(0, dtype=np.int64)
    npoint = min(npoint, n)
    centroids = np.zeros(npoint, dtype=np.int64)
    # Min *squared* distance to the selected set: argmax over squared
    # distances picks the same points, without a sqrt per iteration.
    # np.inf sentinel (not 1e10) stays safe for arbitrarily large coordinates.
    best = np.full(n, np.inf)
    farthest = np.random.randint(0, n) if start is None else int(start)
    for i in range(npoint):
        centroids[i] = farthest
        diff = points - points[farthest]
        d2 = np.einsum('ij,ij->i', diff, diff)
        np.minimum(best, d2, out=best)
        farthest = int(np.argmax(best))
    return centroids
182
+
183
+
184
def build_sigma_knn(points, k=51):
    """Compute per-point sigma as the k-th nearest neighbor distance.

    Note: KDTree.query counts the query point itself as its own nearest
    neighbor, so k=51 yields the distance to the 50th true neighbor.

    Parameters
    ----------
    points : np.ndarray
        (N, 3) point positions.
    k : int
        Neighbor rank (clamped to N).

    Returns
    -------
    np.ndarray
        (N,) distances.
    """
    from scipy.spatial import KDTree
    tree = KDTree(points)
    k_eff = min(k, len(points))
    d, _ = tree.query(points, k=k_eff)
    if d.ndim == 1:
        # k_eff == 1: query returns a 1-D array (self-distances, all zero);
        # d[:, -1] would raise IndexError here.
        return d
    return d[:, -1]