arnabai commited on
Commit
dd94919
Β·
verified Β·
1 Parent(s): 76f3662

Upload 3 files

Browse files
Files changed (3) hide show
  1. app.py +84 -41
  2. raster_to_dxf.py +484 -370
  3. requirements.txt +7 -6
app.py CHANGED
@@ -1,41 +1,84 @@
1
- import gradio as gr
2
- import tempfile, os
3
- from raster_to_dxf import convert
4
-
5
- def run_convert(image_path, upscale, denoise, hough_min, hough_gap, scale_mm):
6
- settings = {
7
- "upscale": upscale,
8
- "denoise_h": int(denoise),
9
- "hough_min_len": int(hough_min),
10
- "hough_max_gap": int(hough_gap),
11
- "output_scale_mm": scale_mm,
12
- }
13
- out_path = tempfile.mktemp(suffix=".dxf")
14
- stats = convert(image_path, out_path, settings)
15
- summary = (f"βœ… Lines: {stats['lines']} | "
16
- f"Polylines: {stats['polylines']} | "
17
- f"Circles: {stats['circles']}")
18
- return out_path, summary
19
-
20
- with gr.Blocks(title="VectorForge β€” PNG to DXF") as demo:
21
- gr.Markdown("# ⬑ VectorForge\n### Raster-to-Vector converter Β· PNG β†’ DXF")
22
-
23
- with gr.Row():
24
- with gr.Column():
25
- image_in = gr.Image(type="filepath", label="Upload PNG / JPG / BMP")
26
- upscale = gr.Slider(1, 4, value=2, step=0.5, label="Upscale factor")
27
- denoise = gr.Slider(1, 20, value=6, step=1, label="Denoise strength")
28
- hmin = gr.Slider(5, 80, value=18, step=1, label="Hough min line length (px)")
29
- hgap = gr.Slider(2, 40, value=10, step=1, label="Hough max gap (px)")
30
- scale = gr.Slider(0.05, 1, value=0.1, step=0.05, label="Output scale (mm/px)")
31
- btn = gr.Button("⚑ Convert to DXF", variant="primary")
32
-
33
- with gr.Column():
34
- dxf_out = gr.File(label="Download DXF")
35
- status = gr.Textbox(label="Stats", interactive=False)
36
-
37
- btn.click(run_convert,
38
- inputs=[image_in, upscale, denoise, hmin, hgap, scale],
39
- outputs=[dxf_out, status])
40
-
41
- demo.launch()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ import tempfile, os
3
+ from raster_to_dxf import convert
4
+
5
+ def run_convert(image_path, upscale, denoise, threshold,
6
+ min_branch, straight_tol, scale_mm,
7
+ circularity, min_r_px):
8
+ settings = {
9
+ "upscale": int(upscale),
10
+ "denoise_h": int(denoise),
11
+ "threshold_value": int(threshold),
12
+ "min_branch_len": int(min_branch),
13
+ "straightness_tol": float(straight_tol),
14
+ "output_scale_mm": float(scale_mm),
15
+ "circle_circularity": float(circularity),
16
+ "circle_min_r_px": float(min_r_px),
17
+ }
18
+ out_path = tempfile.mktemp(suffix=".dxf")
19
+ stats = convert(image_path, out_path, settings)
20
+ summary = (f"βœ… Lines: {stats['lines']} | "
21
+ f"Polylines: {stats['polylines']} | "
22
+ f"Circles: {stats['circles']} | "
23
+ f"Arcs: {stats['arcs']}")
24
+ return out_path, summary
25
+
26
+ with gr.Blocks(title="VectorForge v2 β€” PNG to DXF") as demo:
27
+ gr.Markdown("""
28
+ # ⬑ VectorForge v2
29
+ ### Clean centreline engineering drawing converter Β· PNG β†’ DXF
30
+ Produces single-stroke geometry via skeleton graph tracing β€” not filled blobs.
31
+ """)
32
+
33
+ with gr.Row():
34
+ with gr.Column():
35
+ image_in = gr.Image(type="filepath", label="Upload PNG / JPG / BMP")
36
+
37
+ gr.Markdown("### Pre-processing")
38
+ upscale = gr.Slider(1, 4, value=3, step=1, label="Upscale factor (higher = better skeleton, slower)")
39
+ denoise = gr.Slider(1, 20, value=8, step=1, label="Denoise strength")
40
+ threshold = gr.Slider(100,254, value=200, step=5, label="Ink threshold (lower = pick up faint lines)")
41
+
42
+ gr.Markdown("### Line geometry")
43
+ min_branch = gr.Slider(4, 50, value=12, step=2, label="Min skeleton branch length (px) β€” raise to remove noise")
44
+ straight_tol= gr.Slider(0.5,5, value=1.5, step=0.5, label="Straightness tolerance (px) β€” raise to convert curves to lines")
45
+
46
+ gr.Markdown("### Circle detection")
47
+ circularity = gr.Slider(0.5, 1.0, value=0.72, step=0.02, label="Min circularity (0.72=loose, 0.90=strict circles only)")
48
+ min_r_px = gr.Slider(3, 50, value=10, step=1, label="Min circle radius (upscaled px)")
49
+
50
+ gr.Markdown("### Output")
51
+ scale_mm = gr.Slider(0.01, 1.0, value=0.1, step=0.01, label="Scale (mm per source pixel)")
52
+
53
+ btn = gr.Button("⚑ Convert to DXF", variant="primary")
54
+
55
+ with gr.Column():
56
+ dxf_out = gr.File(label="Download DXF")
57
+ status = gr.Textbox(label="Result stats", interactive=False)
58
+
59
+ gr.Markdown("""
60
+ ### Layer guide
61
+ | Layer | Contents |
62
+ |---|---|
63
+ | `GEOMETRY` | All straight lines and polylines |
64
+ | `CIRCLES` | Detected circular elements |
65
+ | `ARCS` | Curved arc segments |
66
+
67
+ ### Recommended settings by drawing type
68
+ | Drawing type | Upscale | Min branch | Straight tol |
69
+ |---|---|---|---|
70
+ | Clean CAD scan | 3 | 12 | 1.5 |
71
+ | Photo / skewed | 3 | 20 | 2.5 |
72
+ | Dense schematic | 2 | 8 | 1.0 |
73
+ | Faint/old print | 4 | 16 | 2.0 |
74
+ """)
75
+
76
+ btn.click(
77
+ run_convert,
78
+ inputs=[image_in, upscale, denoise, threshold,
79
+ min_branch, straight_tol, scale_mm,
80
+ circularity, min_r_px],
81
+ outputs=[dxf_out, status]
82
+ )
83
+
84
+ demo.launch()
raster_to_dxf.py CHANGED
@@ -1,302 +1,461 @@
1
  #!/usr/bin/env python3
2
  """
3
- Production-grade Raster-to-Vector converter: PNG -> DXF
4
- Optimised for engineering/architectural drawings (like HVAC details).
 
5
  """
6
 
7
- import sys
8
- import argparse
 
 
 
9
  import numpy as np
10
  import cv2
11
  import ezdxf
12
  from ezdxf import units
13
- from pathlib import Path
14
- from dataclasses import dataclass, field
15
- from typing import List, Tuple, Optional
16
- import math
17
 
18
 
19
- # ─── Data types ──────────────────────────────────────────────────────────────
 
 
20
 
21
  @dataclass
22
- class VectorLine:
 
 
 
 
 
23
  x1: float; y1: float; x2: float; y2: float
24
- layer: str = "LINES"
25
 
26
  @dataclass
27
- class VectorPolyline:
28
- points: List[Tuple[float, float]]
29
  closed: bool = False
30
- layer: str = "CONTOURS"
31
 
32
  @dataclass
33
- class VectorCircle:
34
  cx: float; cy: float; r: float
35
  layer: str = "CIRCLES"
36
 
37
  @dataclass
38
- class VectorArc:
39
  cx: float; cy: float; r: float
40
  start_angle: float; end_angle: float
41
  layer: str = "ARCS"
42
 
43
- @dataclass
44
- class VectorText:
45
- x: float; y: float; text: str; height: float = 2.5
46
- layer: str = "TEXT"
47
-
48
  @dataclass
49
  class VectorResult:
50
- lines: List[VectorLine] = field(default_factory=list)
51
- polylines: List[VectorPolyline] = field(default_factory=list)
52
- circles: List[VectorCircle] = field(default_factory=list)
53
- arcs: List[VectorArc] = field(default_factory=list)
54
- texts: List[VectorText] = field(default_factory=list)
55
- width_px: int = 0
56
- height_px: int = 0
57
- scale: float = 1.0 # px β†’ mm
58
 
59
 
60
- # ─── Image pre-processing ────────────────────────────────────────────────────
 
 
61
 
62
- def preprocess(img_bgr: np.ndarray, settings: dict) -> Tuple[np.ndarray, np.ndarray]:
63
- """Return (binary_clean, gray_original)."""
64
- gray = cv2.cvtColor(img_bgr, cv2.COLOR_BGR2GRAY)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
65
 
66
- # Upscale if small
 
 
 
 
 
 
67
  h, w = gray.shape
68
- scale_up = settings.get("upscale", 2.0)
69
- if min(h, w) < 800 or scale_up != 1.0:
70
- gray = cv2.resize(gray, (int(w * scale_up), int(h * scale_up)),
71
- interpolation=cv2.INTER_CUBIC)
72
-
73
- # Denoise
74
- denoised = cv2.fastNlMeansDenoising(gray, h=settings.get("denoise_h", 8),
75
- templateWindowSize=7, searchWindowSize=21)
76
-
77
- # Adaptive threshold + Otsu combined
78
- block = settings.get("adaptive_block", 51)
79
- C = settings.get("adaptive_C", 10)
80
- adapt = cv2.adaptiveThreshold(denoised, 255,
81
- cv2.ADAPTIVE_THRESH_GAUSSIAN_C,
82
- cv2.THRESH_BINARY_INV, block, C)
83
-
84
- _, otsu = cv2.threshold(denoised, 0, 255,
85
- cv2.THRESH_BINARY_INV + cv2.THRESH_OTSU)
86
- binary = cv2.bitwise_or(adapt, otsu)
87
-
88
- # Morphological clean-up
89
- k_open = settings.get("morph_open", 2)
90
- k_close = settings.get("morph_close", 3)
91
- kernel_o = cv2.getStructuringElement(cv2.MORPH_RECT, (k_open, k_open))
92
- kernel_c = cv2.getStructuringElement(cv2.MORPH_RECT, (k_close, k_close))
93
- binary = cv2.morphologyEx(binary, cv2.MORPH_OPEN, kernel_o)
94
- binary = cv2.morphologyEx(binary, cv2.MORPH_CLOSE, kernel_c)
95
-
96
- return binary, gray
97
-
98
-
99
- # ─── Skeleton / thinning for line extraction ─────────────────────────────────
100
-
101
- def thin_image(binary: np.ndarray) -> np.ndarray:
102
- """Skeletonise binary image (Zhang-Suen via scikit-image)."""
103
- from skimage.morphology import skeletonize as sk_skeletonize
104
- bool_img = (binary > 0)
105
- skel = sk_skeletonize(bool_img)
106
- return (skel.astype(np.uint8) * 255)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
107
 
 
108
 
109
- # ─── Probabilistic Hough lines ───────────────────────────────────────────────
110
-
111
- def extract_hough_lines(thin: np.ndarray, settings: dict,
112
- scale: float) -> List[VectorLine]:
113
- lines_out = []
114
- rho = settings.get("hough_rho", 1)
115
- theta = np.pi / 180 * settings.get("hough_theta_deg", 1)
116
- threshold = settings.get("hough_threshold", 30)
117
- min_len = settings.get("hough_min_len", 20)
118
- max_gap = settings.get("hough_max_gap", 8)
119
-
120
- detected = cv2.HoughLinesP(thin, rho, theta, threshold,
121
- minLineLength=min_len, maxLineGap=max_gap)
122
- h = thin.shape[0]
123
- if detected is not None:
124
- for ln in detected:
125
- x1, y1, x2, y2 = ln[0]
126
- lines_out.append(VectorLine(
127
- x1 * scale, (h - y1) * scale,
128
- x2 * scale, (h - y2) * scale,
129
- layer="LINES"
130
- ))
131
- return lines_out
132
-
133
-
134
- # ─── Contour extraction (closed shapes, arcs, circles) ───────────────────────
135
-
136
- def _fit_circle(pts):
137
- """Algebraic circle fit (KΓ₯sa method)."""
138
- x = pts[:, 0].astype(float)
139
- y = pts[:, 1].astype(float)
140
- A = np.column_stack([x, y, np.ones(len(x))])
141
- b = x**2 + y**2
142
- result = np.linalg.lstsq(A, b, rcond=None)
143
- c = result[0]
144
- cx = c[0] / 2
145
- cy = c[1] / 2
146
- r = math.sqrt(c[2] + cx**2 + cy**2)
147
- residuals = np.sqrt((x - cx)**2 + (y - cy)**2) - r
148
- rmse = np.sqrt(np.mean(residuals**2))
149
- return cx, cy, r, rmse
150
 
 
 
 
151
 
152
- def _is_circular(cnt, tol=0.15) -> Optional[Tuple[float, float, float]]:
153
- if len(cnt) < 20:
154
- return None
155
- pts = cnt[:, 0, :]
156
- cx, cy, r, rmse = _fit_circle(pts)
157
- if r < 3:
158
- return None
159
- if rmse / r < tol:
160
- return cx, cy, r
161
- return None
162
-
163
-
164
- def extract_contours(binary: np.ndarray, settings: dict,
165
- scale: float) -> Tuple[List[VectorPolyline],
166
- List[VectorCircle],
167
- List[VectorArc]]:
168
- polylines_out = []
169
- circles_out = []
170
- arcs_out = []
171
-
172
- min_area = settings.get("contour_min_area", 50)
173
- epsilon_r = settings.get("contour_epsilon_ratio", 0.004)
174
- h = binary.shape[0]
175
-
176
- contours, hierarchy = cv2.findContours(binary, cv2.RETR_CCOMP,
177
- cv2.CHAIN_APPROX_TC89_KCOS)
178
- if hierarchy is None:
179
- return polylines_out, circles_out, arcs_out
180
-
181
- for i, cnt in enumerate(contours):
182
- area = cv2.contourArea(cnt)
183
- if area < min_area:
184
- continue
185
 
186
- # Try circle
187
- circle = _is_circular(cnt)
188
- if circle:
189
- cx, cy, r = circle
190
- circles_out.append(VectorCircle(
191
- cx * scale, (h - cy) * scale, r * scale
192
- ))
193
- continue
194
 
195
- # Approximate polygon / spline
196
- peri = cv2.arcLength(cnt, True)
197
- epsilon = epsilon_r * peri
198
- approx = cv2.approxPolyDP(cnt, epsilon, True)
199
- if len(approx) < 2:
200
- continue
201
 
202
- pts = [(p[0][0] * scale, (h - p[0][1]) * scale) for p in approx]
203
- is_closed = (hierarchy[0][i][2] >= 0 or
204
- cv2.isContourConvex(approx) or
205
- len(pts) > 4)
206
- polylines_out.append(VectorPolyline(pts, closed=is_closed))
207
 
208
- return polylines_out, circles_out, arcs_out
 
 
209
 
210
 
211
- # ─── Line merging / deduplication ────────────────────────────────────────────
 
 
212
 
213
- def _line_angle(vl: VectorLine) -> float:
214
- return math.atan2(vl.y2 - vl.y1, vl.x2 - vl.x1)
215
 
 
 
 
 
 
 
 
 
 
 
 
 
216
 
217
- def _line_length(vl: VectorLine) -> float:
218
- return math.hypot(vl.x2 - vl.x1, vl.y2 - vl.y1)
219
 
 
 
220
 
221
- def _point_to_line_dist(px, py, x1, y1, x2, y2) -> float:
222
- dx, dy = x2 - x1, y2 - y1
223
- denom = math.hypot(dx, dy)
224
- if denom < 1e-9:
225
- return math.hypot(px - x1, py - y1)
226
- return abs(dy * px - dx * py + x2 * y1 - y2 * x1) / denom
227
 
 
 
 
 
 
 
 
 
228
 
229
- def merge_lines(lines: List[VectorLine],
230
- angle_tol: float = 3.0,
231
- dist_tol: float = 2.5) -> List[VectorLine]:
232
- """Merge nearly collinear line segments."""
233
- if not lines:
234
- return lines
235
- angle_tol_rad = math.radians(angle_tol)
236
- merged = []
237
- used = [False] * len(lines)
238
 
239
- for i, a in enumerate(lines):
240
- if used[i]:
241
- continue
242
- ang_a = _line_angle(a) % math.pi
243
- group = [a]
244
- used[i] = True
245
- for j, b in enumerate(lines):
246
- if used[j]:
247
- continue
248
- ang_b = _line_angle(b) % math.pi
249
- da = min(abs(ang_a - ang_b), math.pi - abs(ang_a - ang_b))
250
- if da > angle_tol_rad:
251
- continue
252
- d = _point_to_line_dist(b.x1, b.y1, a.x1, a.y1, a.x2, a.y2)
253
- if d > dist_tol:
254
  continue
255
- group.append(b)
256
- used[j] = True
257
-
258
- # Longest span in the group
259
- pts = [(g.x1, g.y1) for g in group] + [(g.x2, g.y2) for g in group]
260
- best_len = -1
261
- bx1 = bx2 = by1 = by2 = 0.0
262
- for p1 in pts:
263
- for p2 in pts:
264
- l = math.hypot(p2[0] - p1[0], p2[1] - p1[1])
265
- if l > best_len:
266
- best_len = l
267
- bx1, by1 = p1
268
- bx2, by2 = p2
269
- merged.append(VectorLine(bx1, by1, bx2, by2))
270
- return merged
271
-
272
-
273
- # ─── Main conversion pipeline ────────────────────────────────────────────────
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
274
 
275
- DEFAULT_SETTINGS = {
276
- "upscale": 2.0,
277
- "denoise_h": 6,
278
- "adaptive_block": 51,
279
- "adaptive_C": 10,
280
- "morph_open": 2,
281
- "morph_close": 3,
282
- "hough_rho": 1,
283
- "hough_theta_deg": 0.5,
284
- "hough_threshold": 25,
285
- "hough_min_len": 18,
286
- "hough_max_gap": 10,
287
- "contour_min_area": 40,
288
- "contour_epsilon_ratio": 0.003,
289
- "merge_angle_tol": 2.5,
290
- "merge_dist_tol": 3.0,
291
- "output_scale_mm": 0.1, # 1 px in output image β†’ 0.1 mm
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
292
  }
293
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
294
 
295
- def convert(input_path: str,
296
- output_path: str,
297
- settings: dict = None,
298
- progress_cb=None) -> dict:
299
- """Full conversion pipeline. Returns stats dict."""
300
  s = {**DEFAULT_SETTINGS, **(settings or {})}
301
 
302
  def progress(msg, pct):
@@ -305,48 +464,80 @@ def convert(input_path: str,
305
  else:
306
  print(f" [{pct:3d}%] {msg}")
307
 
308
- # ── Load ──────────────────────────────────────────────────────
309
  progress("Loading image…", 5)
310
  img = cv2.imread(input_path)
311
  if img is None:
312
- raise FileNotFoundError(f"Cannot load: {input_path}")
313
  h0, w0 = img.shape[:2]
314
 
315
- # ── Preprocess ────────────────────────────────────────────────
316
- progress("Preprocessing (denoise + threshold)…", 15)
317
- binary, gray = preprocess(img, s)
318
- h_up, w_up = binary.shape[:2]
319
- scale = s["output_scale_mm"] / s["upscale"] # px (upscaled) β†’ mm
320
-
321
- # ── Thin ──────────────────────────────────────────────────────
322
- progress("Thinning / skeletonising…", 28)
323
- thin = thin_image(binary)
324
-
325
- # ── Hough lines ───────────────────────────────────────────────
326
- progress("Extracting straight lines (Hough)…", 40)
327
- raw_lines = extract_hough_lines(thin, s, scale)
328
-
329
- # ── Merge ─────────────────────────────────────────────────────
330
- progress("Merging collinear segments…", 52)
331
- merged_lines = merge_lines(raw_lines,
332
- angle_tol=s["merge_angle_tol"],
333
- dist_tol=s["merge_dist_tol"])
334
-
335
- # ── Contours ──────────────────────────────────────────────────
336
- progress("Extracting contours, circles, arcs…", 64)
337
- polylines, circles, arcs = extract_contours(binary, s, scale)
338
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
339
  result = VectorResult(
340
- lines=merged_lines,
341
- polylines=polylines,
342
- circles=circles,
343
- arcs=arcs,
344
- width_px=w0,
345
- height_px=h0,
346
- scale=scale,
347
  )
348
 
349
- # ── Write DXF ─────────────────────────────────────────────────
350
  progress("Writing DXF…", 80)
351
  write_dxf(result, output_path)
352
 
@@ -362,102 +553,25 @@ def convert(input_path: str,
362
  return stats
363
 
364
 
365
- # ─── DXF writer ──────────────────────────────────────────────────────────────
366
-
367
- LAYER_COLORS = {
368
- "LINES": 7, # white/black
369
- "CONTOURS": 3, # green
370
- "CIRCLES": 4, # cyan
371
- "ARCS": 1, # red
372
- "TEXT": 2, # yellow
373
- }
374
-
375
-
376
- def write_dxf(result: VectorResult, path: str):
377
- doc = ezdxf.new(dxfversion="R2010")
378
- doc.units = units.MM
379
- msp = doc.modelspace()
380
-
381
- # Create layers
382
- for name, color in LAYER_COLORS.items():
383
- if name not in doc.layers:
384
- doc.layers.add(name, dxfattribs={"color": color, "lineweight": 25})
385
-
386
- # Lines
387
- for vl in result.lines:
388
- msp.add_line(
389
- (vl.x1, vl.y1), (vl.x2, vl.y2),
390
- dxfattribs={"layer": vl.layer}
391
- )
392
-
393
- # Polylines
394
- for vp in result.polylines:
395
- if len(vp.points) >= 2:
396
- if vp.closed and len(vp.points) >= 3:
397
- msp.add_lwpolyline(
398
- vp.points,
399
- close=True,
400
- dxfattribs={"layer": vp.layer}
401
- )
402
- else:
403
- msp.add_lwpolyline(
404
- vp.points,
405
- close=False,
406
- dxfattribs={"layer": vp.layer}
407
- )
408
-
409
- # Circles
410
- for vc in result.circles:
411
- msp.add_circle(
412
- (vc.cx, vc.cy), vc.r,
413
- dxfattribs={"layer": vc.layer}
414
- )
415
-
416
- # Arcs
417
- for va in result.arcs:
418
- msp.add_arc(
419
- (va.cx, va.cy), va.r,
420
- va.start_angle, va.end_angle,
421
- dxfattribs={"layer": va.layer}
422
- )
423
-
424
- # Texts
425
- for vt in result.texts:
426
- msp.add_text(
427
- vt.text,
428
- dxfattribs={"layer": vt.layer, "height": vt.height,
429
- "insert": (vt.x, vt.y)}
430
- )
431
-
432
- doc.saveas(path)
433
-
434
-
435
- # ─── CLI ─────────────────────────────────────────────────────────────────────
436
 
437
  if __name__ == "__main__":
438
- parser = argparse.ArgumentParser(
439
- description="Raster-to-Vector converter: PNG β†’ DXF")
440
- parser.add_argument("input", help="Input PNG/JPG/BMP path")
441
- parser.add_argument("output", help="Output DXF path")
442
- parser.add_argument("--upscale", type=float, default=2.0)
443
- parser.add_argument("--denoise", type=int, default=6)
444
- parser.add_argument("--hough-min", type=int, default=18,
445
- dest="hough_min_len")
446
- parser.add_argument("--hough-gap", type=int, default=10,
447
- dest="hough_max_gap")
448
- parser.add_argument("--scale-mm", type=float, default=0.1,
449
- dest="output_scale_mm",
450
- help="mm per source pixel (default 0.1)")
451
  args = parser.parse_args()
452
 
453
- settings = {
454
- "upscale": args.upscale,
455
- "denoise_h": args.denoise,
456
- "hough_min_len": args.hough_min_len,
457
- "hough_max_gap": args.hough_max_gap,
458
- "output_scale_mm": args.output_scale_mm,
459
- }
460
- stats = convert(args.input, args.output, settings)
461
- print("\nConversion complete:")
462
  for k, v in stats.items():
463
  print(f" {k}: {v}")
 
1
  #!/usr/bin/env python3
2
  """
3
+ VectorForge v2 β€” Production Raster-to-Vector for Engineering Drawings
4
+ Strategy: skeleton β†’ graph tracing β†’ polyline fitting β†’ symbol recognition β†’ DXF
5
+ Produces clean single-stroke centreline geometry, not filled blobs.
6
  """
7
 
8
+ import sys, math, argparse
9
+ from pathlib import Path
10
+ from dataclasses import dataclass, field
11
+ from typing import List, Tuple, Optional, Dict
12
+
13
  import numpy as np
14
  import cv2
15
  import ezdxf
16
  from ezdxf import units
17
+ from skimage.morphology import skeletonize as sk_skeletonize
18
+ import networkx as nx
 
 
19
 
20
 
21
+ # ═══════════════════════════════════════════════════════════════
22
+ # DATA TYPES
23
+ # ═══════════════════════════════════════════════════════════════
24
 
25
  @dataclass
26
+ class Segment:
27
+ """A traced polyline segment from the skeleton graph."""
28
+ pts: List[Tuple[float, float]] # pixel coords (upscaled)
29
+
30
+ @dataclass
31
+ class DXFLine:
32
  x1: float; y1: float; x2: float; y2: float
33
+ layer: str = "GEOMETRY"
34
 
35
  @dataclass
36
+ class DXFPolyline:
37
+ pts: List[Tuple[float, float]]
38
  closed: bool = False
39
+ layer: str = "GEOMETRY"
40
 
41
  @dataclass
42
+ class DXFCircle:
43
  cx: float; cy: float; r: float
44
  layer: str = "CIRCLES"
45
 
46
  @dataclass
47
+ class DXFArc:
48
  cx: float; cy: float; r: float
49
  start_angle: float; end_angle: float
50
  layer: str = "ARCS"
51
 
 
 
 
 
 
52
  @dataclass
53
  class VectorResult:
54
+ lines: List[DXFLine] = field(default_factory=list)
55
+ polylines: List[DXFPolyline] = field(default_factory=list)
56
+ circles: List[DXFCircle] = field(default_factory=list)
57
+ arcs: List[DXFArc] = field(default_factory=list)
58
+ source_w: int = 0
59
+ source_h: int = 0
 
 
60
 
61
 
62
+ # ═══════════════════════════════════════════════════════════════
63
+ # DEFAULTS
64
+ # ═══════════════════════════════════════════════════════════════
65
 
66
+ DEFAULT_SETTINGS = {
67
+ # Pre-processing
68
+ "upscale": 3, # 3Γ— gives good skeleton quality
69
+ "threshold_value": 200, # pixels darker than this = ink
70
+ "denoise_h": 8,
71
+ "morph_open": 1, # remove single-px specks
72
+ "morph_close": 2, # close tiny gaps in lines
73
+
74
+ # Skeleton tracing
75
+ "min_branch_len": 12, # px (upscaled) β€” prune short skeleton branches
76
+ "douglas_peucker_eps": 1.2, # px β€” simplify traced paths
77
+
78
+ # Line fitting on segments
79
+ "straightness_tol": 1.5, # px β€” max deviation to call a segment straight
80
+ "min_line_len": 8, # px (upscaled) β€” skip tiny lines
81
+
82
+ # Circle / arc detection (on binary, before skeletonize)
83
+ "circle_min_r": 6, # px (upscaled)
84
+ "circle_max_r": 800,
85
+ "circle_dp": 1.2,
86
+ "circle_param1": 60, # Canny upper threshold
87
+ "circle_param2": 22, # accumulator threshold (lower = more circles)
88
+ "circle_min_dist": 20, # min distance between circle centres
89
+
90
+ # Arc fitting on curved segments
91
+ "arc_fit_min_pts": 12, # min skeleton points to attempt arc fit
92
+ "arc_fit_tol": 2.0, # px RMSE to accept arc fit
93
+
94
+ # Output
95
+ "output_scale_mm": 0.1, # mm per source pixel
96
+ }
97
+
98
+
99
+ # ═══════════════════════════════════════════════════════════════
100
+ # STAGE 1 β€” PRE-PROCESSING
101
+ # ═══════════════════════════════════════════════════════════════
102
 
103
+ def preprocess(img_bgr: np.ndarray, s: dict) -> Tuple[np.ndarray, np.ndarray]:
104
+ """
105
+ Returns (binary_ink, gray_upscaled).
106
+ binary_ink: 255 = ink pixel, 0 = background (upscaled).
107
+ """
108
+ gray = cv2.cvtColor(img_bgr, cv2.COLOR_BGR2GRAY)
109
+ scale = s["upscale"]
110
  h, w = gray.shape
111
+ gray_up = cv2.resize(gray, (w * scale, h * scale),
112
+ interpolation=cv2.INTER_CUBIC)
113
+
114
+ denoised = cv2.fastNlMeansDenoising(
115
+ gray_up, h=s["denoise_h"], templateWindowSize=7, searchWindowSize=21)
116
+
117
+ # Simple global threshold β€” works well for scanned/clean drawings
118
+ tval = s["threshold_value"]
119
+ _, binary = cv2.threshold(denoised, tval, 255, cv2.THRESH_BINARY_INV)
120
+
121
+ # Morphological cleanup
122
+ ko = cv2.getStructuringElement(cv2.MORPH_RECT,
123
+ (s["morph_open"], s["morph_open"]))
124
+ kc = cv2.getStructuringElement(cv2.MORPH_RECT,
125
+ (s["morph_close"], s["morph_close"]))
126
+ binary = cv2.morphologyEx(binary, cv2.MORPH_OPEN, ko)
127
+ binary = cv2.morphologyEx(binary, cv2.MORPH_CLOSE, kc)
128
+
129
+ return binary, gray_up
130
+
131
+
132
+ # ═══════════════════════════════════════════════════════════════
133
+ # STAGE 2 β€” CIRCLE / ARC DETECTION (before skeletonize)
134
+ # ═══════════════════════════════════════════════════════════════
135
+
136
+ def _detect_circles_contour(binary: np.ndarray, s: dict,
137
+ img_h_up: int, scale_up: int,
138
+ mm_per_src_px: float
139
+ ) -> Tuple[List[DXFCircle], list]:
140
+ """
141
+ Detect circles using contour circularity (4π·area/perimeterΒ²).
142
+ Far more accurate than Hough for engineering drawings.
143
+ Returns (dxf_circles, [(cx_px, cy_px, r_px), ...] for masking).
144
+ """
145
+ min_r = s.get("circle_min_r_px", 10) # upscaled px
146
+ min_peri = s.get("circle_min_peri", 60)
147
+ min_area = s.get("circle_min_area_c", 200)
148
+ circ_thr = s.get("circle_circularity", 0.72)
149
+
150
+ contours, _ = cv2.findContours(binary, cv2.RETR_LIST, cv2.CHAIN_APPROX_NONE)
151
+ seen = []
152
+ dxf_out = []
153
+
154
+ for cnt in contours:
155
+ area = cv2.contourArea(cnt)
156
+ peri = cv2.arcLength(cnt, True)
157
+ if peri < min_peri or area < min_area:
158
+ continue
159
+ circularity = 4 * math.pi * area / (peri * peri)
160
+ if circularity < circ_thr:
161
+ continue
162
+ (cx, cy), r = cv2.minEnclosingCircle(cnt)
163
+ if r < min_r:
164
+ continue
165
+ # De-duplicate in pixel space
166
+ dup = any(math.hypot(cx-ox, cy-oy) < (r + or_) * 0.5 and abs(r - or_) < r * 0.3
167
+ for ox, oy, or_ in seen)
168
+ if dup:
169
+ continue
170
+ seen.append((cx, cy, r))
171
+ cx_mm, cy_mm = px_to_mm(cx, cy, img_h_up, scale_up, mm_per_src_px)
172
+ r_mm = (r / scale_up) * mm_per_src_px
173
+ dxf_out.append(DXFCircle(cx_mm, cy_mm, r_mm))
174
 
175
+ return dxf_out, seen
176
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
177
 
178
+ def detect_circles(gray_up: np.ndarray, s: dict) -> List[DXFCircle]:
179
+ """Legacy β€” kept for CLI compat. Use _detect_circles_contour in pipeline."""
180
+ return []
181
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
182
 
183
+ def _erase_circles(binary: np.ndarray, circles_raw,
184
+ margin: int = 4) -> np.ndarray:
185
+ """Erase Hough-detected circles from binary."""
186
+ out = binary.copy()
187
+ if circles_raw is not None:
188
+ for x, y, r in circles_raw[0]:
189
+ cv2.circle(out, (int(x), int(y)), int(r) + margin, 0, -1)
190
+ return out
191
 
 
 
 
 
 
 
192
 
193
+ # ═══════════════════════════════════════════════════════════════
194
+ # STAGE 3 β€” SKELETONIZE
195
+ # ═══════════════════════════════════════════════════════════════
 
 
196
 
197
+ def skeletonize(binary: np.ndarray) -> np.ndarray:
198
+ skel = sk_skeletonize(binary > 0)
199
+ return (skel.astype(np.uint8) * 255)
200
 
201
 
202
+ # ═══════════════════════════════════════════════════════════════
203
+ # STAGE 4 β€” SKELETON β†’ GRAPH β†’ SEGMENTS
204
+ # ═══════════════════════════════════════════════════════════════
205
 
206
+ _NEIGHBOURS = [(-1,-1),(-1,0),(-1,1),(0,-1),(0,1),(1,-1),(1,0),(1,1)]
 
207
 
208
+ def _build_graph(skel: np.ndarray) -> nx.Graph:
209
+ """Build a graph from skeleton pixels. Nodes are (row,col), edges connect neighbours."""
210
+ G = nx.Graph()
211
+ ys, xs = np.where(skel > 0)
212
+ pts = set(zip(ys.tolist(), xs.tolist()))
213
+ for (r, c) in pts:
214
+ G.add_node((r, c))
215
+ for dr, dc in _NEIGHBOURS:
216
+ nb = (r + dr, c + dc)
217
+ if nb in pts:
218
+ G.add_edge((r, c), nb)
219
+ return G
220
 
 
 
221
 
222
+ def _node_degree(G: nx.Graph, node) -> int:
223
+ return G.degree(node)
224
 
 
 
 
 
 
 
225
 
226
+ def _trace_segments(G: nx.Graph, min_branch_len: int) -> List[List[Tuple[int,int]]]:
227
+ """
228
+ Trace skeleton graph into ordered polyline segments.
229
+ Splits at junction/endpoint nodes (degree != 2).
230
+ Uses frozenset edge keys so direction doesn't matter.
231
+ """
232
+ if len(G.nodes) == 0:
233
+ return []
234
 
235
+ # Non-chain nodes: endpoints (deg 1) + junctions (deg > 2)
236
+ branch_nodes = {n for n in G.nodes if G.degree(n) != 2}
237
+ if not branch_nodes:
238
+ branch_nodes = {next(iter(G.nodes))} # pure loop
 
 
 
 
 
239
 
240
+ visited = set()
241
+ segments = []
242
+
243
+ for start in branch_nodes:
244
+ for nb in list(G.neighbors(start)):
245
+ edge = frozenset([start, nb])
246
+ if edge in visited:
 
 
 
 
 
 
 
 
247
  continue
248
+ # Walk the chain
249
+ path = [start, nb]
250
+ visited.add(edge)
251
+ prev, cur = start, nb
252
+ while G.degree(cur) == 2:
253
+ nxts = [n for n in G.neighbors(cur) if n != prev]
254
+ if not nxts:
255
+ break
256
+ nxt = nxts[0]
257
+ e2 = frozenset([cur, nxt])
258
+ if e2 in visited:
259
+ break
260
+ visited.add(e2)
261
+ path.append(nxt)
262
+ prev, cur = cur, nxt
263
+ if len(path) >= min_branch_len:
264
+ segments.append(path)
265
+
266
+ return segments
267
+
268
+
269
+ def _simplify_path(path: List[Tuple[int,int]], eps: float) -> List[Tuple[float,float]]:
270
+ """Douglas-Peucker simplification. Input: list of (row,col). Output: (x,y) floats."""
271
+ if len(path) < 2:
272
+ return [(p[1], p[0]) for p in path]
273
+ pts = np.array([[p[1], p[0]] for p in path], dtype=np.float32)
274
+ # OpenCV DP
275
+ pts_c = pts.reshape(-1, 1, 2)
276
+ approx = cv2.approxPolyDP(pts_c, eps, False)
277
+ return [(float(p[0][0]), float(p[0][1])) for p in approx]
278
+
279
+
280
+ def trace_skeleton_to_segments(skel: np.ndarray, s: dict) -> List[Segment]:
281
+ G = _build_graph(skel)
282
+ raw_segs = _trace_segments(G, min_branch_len=s["min_branch_len"])
283
+ segments = []
284
+ for path in raw_segs:
285
+ simplified = _simplify_path(path, s["douglas_peucker_eps"])
286
+ if len(simplified) >= 2:
287
+ segments.append(Segment(pts=simplified))
288
+ return segments
289
+
290
+
291
+ # ═══════════════════════════════════════════════════════════════
292
+ # STAGE 5 β€” SEGMENT CLASSIFICATION
293
+ # (straight line | arc | polyline)
294
+ # ═══════════════════════════════════════════════════════════════
295
+
296
+ def _fit_line_error(pts) -> float:
297
+ """Max perpendicular distance from any point to the line p0β†’p1."""
298
+ p0, p1 = np.array(pts[0]), np.array(pts[-1])
299
+ d = p1 - p0
300
+ length = np.linalg.norm(d)
301
+ if length < 1e-9:
302
+ return 0.0
303
+ d_norm = d / length
304
+ errors = []
305
+ for p in pts:
306
+ v = np.array(p) - p0
307
+ proj = np.dot(v, d_norm)
308
+ perp = v - proj * d_norm
309
+ errors.append(np.linalg.norm(perp))
310
+ return max(errors)
311
+
312
+
313
+ def _fit_circle_algebraic(pts):
314
+ """KΓ₯sa algebraic circle fit. Returns (cx, cy, r, rmse)."""
315
+ x = np.array([p[0] for p in pts], dtype=float)
316
+ y = np.array([p[1] for p in pts], dtype=float)
317
+ A = np.column_stack([x, y, np.ones(len(x))])
318
+ b = x**2 + y**2
319
+ c, _, _, _ = np.linalg.lstsq(A, b, rcond=None)
320
+ cx, cy = c[0]/2, c[1]/2
321
+ r = math.sqrt(max(0, c[2] + cx**2 + cy**2))
322
+ residuals = np.sqrt((x - cx)**2 + (y - cy)**2) - r
323
+ rmse = math.sqrt(np.mean(residuals**2))
324
+ return cx, cy, r, rmse
325
 
326
+
327
+ def _arc_angles(pts, cx, cy):
328
+ """Return (start_angle, end_angle) in degrees for an arc through pts."""
329
+ angles = [math.degrees(math.atan2(cy - p[1], p[0] - cx)) % 360 for p in pts]
330
+ start = angles[0]
331
+ end = angles[-1]
332
+ return start, end
333
+
334
+
335
def classify_segment(seg: Segment, s: dict
                     ) -> Tuple[str, object]:
    """
    Classify a traced segment as a straight line, arc, or polyline.

    Returns ('line', (x1, y1, x2, y2)) | ('arc', (cx, cy, r, sa, ea))
          | ('poly', pts) | ('skip', None).
    Coords still in upscaled pixels at this stage.

    Settings used:
      straightness_tol  max perpendicular deviation (px) for the line test
      min_line_len      minimum traced path length (px); shorter β†’ skipped
      arc_fit_min_pts   minimum point count before attempting a circle fit
      arc_fit_tol       max RMSE (px) accepted for the arc fit
    """
    pts = seg.pts
    n = len(pts)
    tol = s["straightness_tol"]
    min_len = s["min_line_len"]

    # ── Length filter ────────────────────────────────────────────
    # Measure the traced path length, not the endpoint chord: a long
    # closed or strongly curved path has nearly coincident endpoints
    # and would otherwise be discarded as "too short".
    path_len = sum(math.hypot(b[0] - a[0], b[1] - a[1])
                   for a, b in zip(pts, pts[1:]))
    if path_len < min_len:
        return ("skip", None)

    # ── Straight line test ───────────────────────────────────────
    err = _fit_line_error(pts)
    p0, p1 = pts[0], pts[-1]
    if err <= tol:
        return ("line", (p0[0], p0[1], p1[0], p1[1]))

    # ── Arc test ─────────────────────────────────────────────────
    if n >= s["arc_fit_min_pts"]:
        try:
            cx, cy, r, rmse = _fit_circle_algebraic(pts)
            # r > 3 px guards against degenerate fits on near-point blobs.
            if rmse <= s["arc_fit_tol"] and r > 3:
                sa, ea = _arc_angles(pts, cx, cy)
                return ("arc", (cx, cy, r, sa, ea))
        except Exception:
            # Singular least-squares fits fall through to the polyline case.
            pass

    # ── Polyline fallback ────────────────────────────────────────
    return ("poly", pts)
369
+
370
+
371
+ # ═══════════════════════════════════════════════════════════════
372
+ # STAGE 6 β€” COORDINATE TRANSFORM (upscaled px β†’ mm DXF)
373
+ # ═══════════════════════════════════════════════════════════════
374
+
375
def px_to_mm(x, y, img_h_up, scale_up, mm_per_src_px):
    """Convert an upscaled-pixel point (x, y) to DXF millimetres.

    The Y axis is flipped (image Y grows downward, DXF Y grows upward),
    and the upscale factor is divided back out before applying the
    mm-per-source-pixel scale.
    """
    mm_per_up_px = mm_per_src_px / scale_up
    x_mm = x * mm_per_up_px
    y_mm = (img_h_up - y) * mm_per_up_px
    return x_mm, y_mm
379
+
380
+
381
+ # ═══════════════════════════════════════════════════════════════
382
+ # STAGE 7 β€” DEDUPLICATION
383
+ # ═══════════════════════════════════════════════════════════════
384
+
385
def deduplicate_lines(lines: List[DXFLine], tol_mm: float = 0.5) -> List[DXFLine]:
    """Drop lines whose endpoints nearly coincide with an already-kept
    line, in either orientation (sum of endpoint distances < tol_mm)."""
    unique = []
    for cand in lines:
        for ref in unique:
            same = (math.hypot(cand.x1 - ref.x1, cand.y1 - ref.y1)
                    + math.hypot(cand.x2 - ref.x2, cand.y2 - ref.y2))
            flipped = (math.hypot(cand.x1 - ref.x2, cand.y1 - ref.y2)
                       + math.hypot(cand.x2 - ref.x1, cand.y2 - ref.y1))
            if min(same, flipped) < tol_mm:
                break  # near-duplicate of ref β†’ discard cand
        else:
            unique.append(cand)
    return unique
399
+
400
+
401
def deduplicate_circles(circles: List[DXFCircle], tol_mm: float = 1.0) -> List[DXFCircle]:
    """Drop circles whose centre and radius both match an already-kept
    circle within tol_mm."""
    unique = []
    for cand in circles:
        for ref in unique:
            if (math.hypot(cand.cx - ref.cx, cand.cy - ref.cy) < tol_mm
                    and abs(cand.r - ref.r) < tol_mm):
                break  # near-duplicate of ref β†’ discard cand
        else:
            unique.append(cand)
    return unique
411
+
412
+
413
+ # ═══════════════════════════════════════════════════════════════
414
+ # STAGE 8 β€” DXF WRITER
415
+ # ═══════════════════════════════════════════════════════════════
416
+
417
# Per-layer DXF attributes: "color" is an AutoCAD Color Index (ACI),
# "lw" a lineweight in hundredths of a millimetre (25 β†’ 0.25 mm).
LAYER_CFG = {
    "GEOMETRY": {"color": 7, "lw": 25},  # ACI 7: white/black depending on background
    "CIRCLES": {"color": 4, "lw": 25},   # ACI 4: cyan
    "ARCS": {"color": 1, "lw": 25},      # ACI 1: red
}
422
 
423
def write_dxf(result: VectorResult, path: str):
    """Serialize a VectorResult to a DXF R2010 file (units: mm).

    Entities are placed on the layer named by their own `.layer`
    attribute; the layers configured in LAYER_CFG are created up front.
    """
    doc = ezdxf.new(dxfversion="R2010")
    doc.units = units.MM
    msp = doc.modelspace()

    # Ensure every configured layer exists before adding entities.
    for layer_name, layer_cfg in LAYER_CFG.items():
        if layer_name not in doc.layers:
            attribs = {"color": layer_cfg["color"],
                       "lineweight": layer_cfg["lw"]}
            doc.layers.add(layer_name, dxfattribs=attribs)

    for line in result.lines:
        start, end = (line.x1, line.y1), (line.x2, line.y2)
        msp.add_line(start, end, dxfattribs={"layer": line.layer})

    for poly in result.polylines:
        if len(poly.pts) < 2:
            continue  # a polyline needs at least two vertices
        msp.add_lwpolyline(poly.pts, close=poly.closed,
                           dxfattribs={"layer": poly.layer})

    for circle in result.circles:
        msp.add_circle((circle.cx, circle.cy), circle.r,
                       dxfattribs={"layer": circle.layer})

    for arc in result.arcs:
        msp.add_arc((arc.cx, arc.cy), arc.r,
                    arc.start_angle, arc.end_angle,
                    dxfattribs={"layer": arc.layer})

    doc.saveas(path)
450
+
451
+
452
+ # ═══════════════════════════════════════════════════════════════
453
+ # MAIN PIPELINE
454
+ # ═══════════════════════════════════════════════════════════════
455
+
456
+ def convert(input_path: str, output_path: str,
457
+ settings: dict = None, progress_cb=None) -> dict:
458
 
 
 
 
 
 
459
  s = {**DEFAULT_SETTINGS, **(settings or {})}
460
 
461
  def progress(msg, pct):
 
464
  else:
465
  print(f" [{pct:3d}%] {msg}")
466
 
467
+ # ── Load ───────────────────────────────────────────────────
468
  progress("Loading image…", 5)
469
  img = cv2.imread(input_path)
470
  if img is None:
471
+ raise FileNotFoundError(f"Cannot open: {input_path}")
472
  h0, w0 = img.shape[:2]
473
 
474
+ # ── Pre-process ────────────────────────────────────────────
475
+ progress("Pre-processing (threshold + denoise)…", 10)
476
+ binary, gray_up = preprocess(img, s)
477
+ h_up = gray_up.shape[0]
478
+ scale_up = s["upscale"]
479
+ mm = s["output_scale_mm"]
480
+
481
+ # ── Circle detection (contour-based, much more accurate) ───
482
+ progress("Detecting circles (contour circularity)…", 18)
483
+ dxf_circles, circle_mask_list = _detect_circles_contour(binary, s, h_up, scale_up, mm)
484
+ dxf_circles = deduplicate_circles(dxf_circles)
485
+
486
+ # ── Erase circles from binary so skeleton isn't polluted ───
487
+ progress("Erasing circles from binary…", 22)
488
+ binary_no_circles = binary.copy()
489
+ for (cx, cy, r) in circle_mask_list:
490
+ cv2.circle(binary_no_circles, (int(cx), int(cy)), int(r) + 8, 0, -1)
491
+
492
+ # ── Skeletonize ────────────────────────────────────────────
493
+ progress("Skeletonizing…", 30)
494
+ skel = skeletonize(binary_no_circles)
495
+
496
+ # ── Graph trace β†’ segments ─────────────────────────────────
497
+ progress("Tracing skeleton graph…", 45)
498
+ segments = trace_skeleton_to_segments(skel, s)
499
+ progress(f" β†’ {len(segments)} raw segments", 50)
500
+
501
+ # ── Classify segments ──────────────────────────────────────
502
+ progress("Classifying segments (line / arc / poly)…", 58)
503
+ dxf_lines = []
504
+ dxf_arcs = []
505
+ dxf_polys = []
506
+
507
+ for seg in segments:
508
+ kind, data = classify_segment(seg, s)
509
+
510
+ if kind == "line":
511
+ x1, y1, x2, y2 = data
512
+ x1m, y1m = px_to_mm(x1, y1, h_up, scale_up, mm)
513
+ x2m, y2m = px_to_mm(x2, y2, h_up, scale_up, mm)
514
+ dxf_lines.append(DXFLine(x1m, y1m, x2m, y2m))
515
+
516
+ elif kind == "arc":
517
+ cx, cy, r, sa, ea = data
518
+ cxm, cym = px_to_mm(cx, cy, h_up, scale_up, mm)
519
+ rm = (r / scale_up) * mm
520
+ dxf_arcs.append(DXFArc(cxm, cym, rm, sa, ea))
521
+
522
+ elif kind == "poly":
523
+ pts_mm = [px_to_mm(p[0], p[1], h_up, scale_up, mm) for p in data]
524
+ dxf_polys.append(DXFPolyline(pts_mm, closed=False))
525
+
526
+ # ── Deduplication ──────────────────────────────────────────
527
+ progress("Deduplicating…", 68)
528
+ dxf_lines = deduplicate_lines(dxf_lines)
529
+
530
+ # ── Build result ───────────────────────────────────────────
531
  result = VectorResult(
532
+ lines=dxf_lines,
533
+ polylines=dxf_polys,
534
+ circles=dxf_circles,
535
+ arcs=dxf_arcs,
536
+ source_w=w0,
537
+ source_h=h0,
 
538
  )
539
 
540
+ # ── Write DXF ──────────────────────────────────────────────
541
  progress("Writing DXF…", 80)
542
  write_dxf(result, output_path)
543
 
 
553
  return stats
554
 
555
 
556
+ # ═══════════════════════════════════════════════════════════════
557
+ # CLI
558
+ # ═══════════════════════════════════════════════════════════════
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
559
 
560
  if __name__ == "__main__":
561
+ parser = argparse.ArgumentParser(description="VectorForge v2 β€” PNG β†’ DXF")
562
+ parser.add_argument("input")
563
+ parser.add_argument("output")
564
+ parser.add_argument("--upscale", type=int, default=3)
565
+ parser.add_argument("--threshold", type=int, default=200, dest="threshold_value")
566
+ parser.add_argument("--denoise", type=int, default=8, dest="denoise_h")
567
+ parser.add_argument("--min-branch", type=int, default=12, dest="min_branch_len")
568
+ parser.add_argument("--straight-tol", type=float, default=1.5, dest="straightness_tol")
569
+ parser.add_argument("--scale-mm", type=float, default=0.1, dest="output_scale_mm")
 
 
 
 
570
  args = parser.parse_args()
571
 
572
+ overrides = {k: v for k, v in vars(args).items()
573
+ if k not in ("input", "output")}
574
+ stats = convert(args.input, args.output, overrides)
575
+ print("\nConversion stats:")
 
 
 
 
 
576
  for k, v in stats.items():
577
  print(f" {k}: {v}")
requirements.txt CHANGED
@@ -1,6 +1,7 @@
1
- gradio
2
- ezdxf
3
- opencv-python-headless
4
- scikit-image
5
- numpy
6
- scipy
 
 
1
+ gradio
2
+ ezdxf
3
+ opencv-python-headless
4
+ scikit-image
5
+ numpy
6
+ scipy
7
+ networkx