mmrech committed on
Commit
c85f5af
·
verified ·
1 Parent(s): e9066b6

Upload app.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. app.py +661 -470
app.py CHANGED
@@ -1,18 +1,24 @@
1
  """
2
- Image Processing Studio + NPH Neuroimaging Analysis
3
- Unified Gradio app with filters, ML models, YOLO NPH detection, clinical scoring,
4
- and intensity-based NPH segmentation.
 
 
 
5
  """
6
 
7
  import gradio as gr
8
  import numpy as np
9
- from PIL import Image, ImageFilter, ImageEnhance, ImageOps, ImageDraw, ImageFont
10
  from transformers import pipeline
11
  import cv2
12
  import tempfile
13
  import os
14
  import threading
15
  import logging
 
 
 
16
 
17
  from segment_neuroimaging import (
18
  segment_nph, segment_ventricles, compute_evans_index,
@@ -27,28 +33,53 @@ from segment_neuroimaging import (
27
  logging.basicConfig(level=logging.INFO)
28
  logger = logging.getLogger(__name__)
29
 
30
- # ---- Load ML models (cached on first use) ----
31
- classifier = pipeline("image-classification", model="google/vit-base-patch16-224")
32
- detector = pipeline("object-detection", model="facebook/detr-resnet-50")
33
- segmenter = pipeline("image-segmentation", model="facebook/detr-resnet-50-panoptic")
 
 
 
 
 
 
 
 
 
 
 
 
34
 
35
- # ---- YOLO model for NPH structure detection ----
 
 
 
 
 
 
 
36
  YOLO_MODEL_PATH = "best.pt"
37
  _yolo_model = None
38
  _yolo_lock = threading.Lock()
39
 
40
- # Class-specific colors for YOLO detections (BGR for OpenCV, RGB for display)
41
  YOLO_COLORS = {
42
- "ventricle": (0, 150, 255), # bright blue
43
- "sylvian_fissure": (200, 100, 255), # purple
44
- "tight_convexity": (255, 150, 100), # orange
45
- "pvh": (255, 200, 0), # yellow
46
- "skull_inner": (200, 200, 200), # gray
 
 
 
 
 
 
 
 
47
  }
48
 
49
 
50
  def _get_yolo_model():
51
- """Lazy-load YOLOv8 model."""
52
  global _yolo_model
53
  if _yolo_model is None:
54
  with _yolo_lock:
@@ -62,12 +93,12 @@ def _get_yolo_model():
62
  return _yolo_model
63
 
64
 
 
 
 
 
65
  def _compute_nph_score(data: dict) -> dict:
66
- """
67
- Compute NPH probability score from structured metrics.
68
- Weighted formula: VSR(40%) + Evans Index(25%) + Callosal Angle(20%) + DESH(10%) + Sylvian(5%)
69
- With triad bonus (+15%) and cortical atrophy penalty (-30%).
70
- """
71
  score = 0.0
72
  evans = data.get("evansIndex") or 0.0
73
  callosal = data.get("callosalAngle")
@@ -91,7 +122,6 @@ def _compute_nph_score(data: dict) -> dict:
91
  if sylvian:
92
  score += 5
93
  else:
94
- # Redistribute VSR weight across remaining criteria
95
  scale = 100 / 60
96
  if evans > 0.3:
97
  score += 25 * scale * min((evans - 0.3) / 0.15, 1)
@@ -115,54 +145,93 @@ def _compute_nph_score(data: dict) -> dict:
115
  score = int(round(min(score, 100)))
116
 
117
  if score >= 75:
118
- label = "Probable NPH"
119
- recommendation = "Strongly consider CSF tap test and neurosurgical referral for VP shunt evaluation."
120
  elif score >= 50:
121
- label = "Possible NPH"
122
- recommendation = "CSF tap test recommended. Consider supplementary MRI for DESH confirmation."
123
  elif score >= 30:
124
- label = "Low Suspicion"
125
- recommendation = "NPH less likely. Consider alternative diagnoses. Follow-up imaging in 6 months if clinical concern persists."
126
  else:
127
- label = "Unlikely NPH"
128
- recommendation = "Ventriculomegaly likely ex-vacuo or other etiology. Investigate alternative causes of symptoms."
129
 
130
- return {"score": score, "label": label, "recommendation": recommendation}
131
 
132
 
133
  # ===========================================================================
134
- # Tab 1: NPH Neuroimaging Analysis (Intensity-based Segmentation)
135
  # ===========================================================================
136
 
137
- def analyze_nph(image, modality, sensitivity, overlay_alpha, pixel_spacing_str):
138
- if image is None:
139
- raise gr.Error("Please upload a brain MRI or CT image first.")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
140
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
141
  with tempfile.NamedTemporaryFile(suffix=".png", delete=False) as f:
142
  Image.fromarray(image).save(f.name)
143
  temp_path = f.name
144
 
145
  try:
146
  modality_map = {
147
- "Axial FLAIR": "FLAIR",
148
- "Axial T1": "T1",
149
- "Axial T2": "T2",
150
- "Coronal T2": "T2",
151
- "Axial T2 FFE": "T2",
152
- "Sagittal T1": "T1",
153
  "CT Head": "CT_HEAD",
154
  }
155
  mod_key = modality_map.get(modality, "T1")
156
  mod = Modality[mod_key]
157
  is_coronal = "Coronal" in modality
158
 
159
- pixel_spacing = None
160
- if pixel_spacing_str and pixel_spacing_str.strip():
161
- try:
162
- pixel_spacing = float(pixel_spacing_str.strip())
163
- except ValueError:
164
- pass
165
-
166
  img_rgb, gray, _ = preprocess_image(temp_path)
167
  h, w = gray.shape[:2]
168
  blurred = cv2.GaussianBlur(gray, (5, 5), 0)
@@ -170,14 +239,11 @@ def analyze_nph(image, modality, sensitivity, overlay_alpha, pixel_spacing_str):
170
 
171
  orig_thresh = dict(VENTRICLE_THRESHOLDS[mod])
172
  sens_adj = (sensitivity - 50) / 50.0
173
-
174
  custom_thresholds = dict(orig_thresh)
175
  if CSF_MODE[mod] == CSFAppearance.DARK:
176
- custom_thresholds["csf_high"] = max(20, min(120,
177
- int(orig_thresh["csf_high"] + sens_adj * 30)))
178
  else:
179
- custom_thresholds["csf_low"] = max(100, min(220,
180
- int(orig_thresh["csf_low"] - sens_adj * 30)))
181
 
182
  vent_mask = segment_ventricles(gray, mod, roi_mask, custom_thresholds=custom_thresholds)
183
 
@@ -188,21 +254,20 @@ def analyze_nph(image, modality, sensitivity, overlay_alpha, pixel_spacing_str):
188
  th_data = compute_temporal_horn_width(vent_mask, pixel_spacing)
189
  tv_data = compute_third_ventricle_width(vent_mask, pixel_spacing)
190
  desh_data = assess_desh(vent_mask, gray, roi_mask, mod, pixel_spacing)
191
-
192
- pvh_data = None
193
- if mod == Modality.FLAIR:
194
- pvh_data = score_pvh(gray, vent_mask)
195
-
196
  ca_data = compute_callosal_angle(vent_mask) if is_coronal else {}
197
 
198
  vent_area = int((vent_mask > 0).sum())
199
  brain_area = int((roi_mask > 0).sum())
200
- vent_brain_ratio = round(vent_area / brain_area, 4) if brain_area > 0 else 0
201
 
 
 
 
 
202
  display_masks = {"ventricles": vent_mask}
203
  parenchyma = cv2.bitwise_and(roi_mask, cv2.bitwise_not(vent_mask))
204
  display_masks["parenchyma"] = parenchyma
205
-
206
  if pvh_data and mod == Modality.FLAIR:
207
  display_masks["pvh"] = pvh_data["pvh_mask"]
208
  if "sylvian_mask" in desh_data:
@@ -210,22 +275,18 @@ def analyze_nph(image, modality, sensitivity, overlay_alpha, pixel_spacing_str):
210
  if "convexity_mask" in desh_data:
211
  display_masks["high_convexity_sulci"] = desh_data["convexity_mask"]
212
 
213
- overlay = create_overlay(img_rgb, display_masks, alpha=overlay_alpha)
214
-
215
- biomarkers_for_annotation = dict(ei_data)
216
- biomarkers_for_annotation.update(th_data)
217
  if pvh_data:
218
- biomarkers_for_annotation["pvh_grade"] = pvh_data["pvh_grade"]
219
- biomarkers_for_annotation["is_desh_positive"] = desh_data["is_desh_positive"]
220
  if ca_data.get("callosal_angle_deg") is not None:
221
- biomarkers_for_annotation["callosal_angle_deg"] = ca_data["callosal_angle_deg"]
222
 
223
- annotated = add_annotations(
224
- overlay, display_masks,
225
- f"{modality} -- NPH Analysis",
226
- biomarkers_for_annotation,
227
- )
228
 
 
229
  row = ei_data.get("measurement_row", 0)
230
  if row > 0:
231
  cols = np.where(vent_mask[row, :] > 0)[0]
@@ -233,82 +294,42 @@ def analyze_nph(image, modality, sensitivity, overlay_alpha, pixel_spacing_str):
233
  minX, maxX = int(cols[0]), int(cols[-1])
234
  cv2.line(annotated, (minX, row), (maxX, row), (255, 220, 0), 2)
235
  skull_d = ei_data.get("skull_diameter_px", w)
236
- center_x = w // 2
237
- half_skull = skull_d // 2
238
- cv2.line(annotated, (center_x - half_skull, row + 8),
239
- (center_x + half_skull, row + 8), (200, 200, 200), 1)
240
-
241
- comparison = create_comparison(img_rgb, annotated, f"{modality} -- NPH Analysis")
242
-
243
- report_lines = ["## NPH Biomarker Report\n"]
244
-
245
- ei = ei_data.get("evans_index", 0)
246
- status = "**ABNORMAL (>0.3)**" if ei > 0.3 else "Normal"
247
- report_lines.append(f"**Evans' Index:** {ei:.3f} -- {status}")
248
-
249
- if ei_data.get("frontal_horn_width_mm"):
250
- report_lines.append(f" - Frontal horn width: {ei_data['frontal_horn_width_mm']} mm")
251
- report_lines.append(f" - Skull diameter: {ei_data['skull_diameter_mm']} mm")
252
-
253
- thw = th_data.get("temporal_horn_width_px", 0)
254
- if thw > 0:
255
- thw_mm = th_data.get("temporal_horn_width_mm", "")
256
- mm_str = f" ({thw_mm} mm)" if thw_mm else ""
257
- report_lines.append(f"**Temporal Horn Width:** {thw} px{mm_str}")
258
-
259
- tvw = tv_data.get("third_ventricle_width_px", 0)
260
- if tvw > 0:
261
- tvw_mm = tv_data.get("third_ventricle_width_mm", "")
262
- mm_str = f" ({tvw_mm} mm)" if tvw_mm else ""
263
- report_lines.append(f"**Third Ventricle Width:** {tvw} px{mm_str}")
264
-
265
- report_lines.append(f"**Ventricle/Brain Ratio:** {vent_brain_ratio:.4f} ({vent_area} / {brain_area} px)")
266
-
267
- if pvh_data:
268
- grade_desc = {0: "None", 1: "Pencil-thin rim", 2: "Smooth halo", 3: "Irregular, deep WM"}
269
- report_lines.append(f"**PVH Grade:** {pvh_data['pvh_grade']}/3 -- {grade_desc.get(pvh_data['pvh_grade'], '')}")
270
- report_lines.append(f" - PVH ratio: {pvh_data['pvh_ratio']:.4f}")
271
-
272
- if ca_data.get("callosal_angle_deg") is not None:
273
- ca = ca_data["callosal_angle_deg"]
274
- ca_status = "Suggestive of NPH" if ca < 90 else ("Indeterminate" if ca < 120 else "Normal/ex vacuo")
275
- report_lines.append(f"**Callosal Angle:** {ca:.1f} deg -- {ca_status}")
276
-
277
- desh = desh_data.get("is_desh_positive", False)
278
- desh_str = "**POSITIVE**" if desh else "Negative"
279
- desh_score = desh_data.get("total_score", 0)
280
- report_lines.append(f"\n### DESH Assessment: {desh_str} (score: {desh_score}/6)")
281
- report_lines.append(f"- Ventriculomegaly: {desh_data.get('ventriculomegaly_score', 'N/A')}/2")
282
- report_lines.append(f"- Sylvian dilation: {desh_data.get('sylvian_dilation_score', 'N/A')}/2")
283
- report_lines.append(f"- Convexity tightness: {desh_data.get('convexity_tightness_score', 'N/A')}/2")
284
- scr = desh_data.get("sylvian_convexity_ratio", "N/A")
285
- report_lines.append(f"- Sylvian/Convexity ratio: {scr}")
286
-
287
- report_lines.append(f"\n---\n*Sensitivity: {sensitivity}% | Thresholds: CSF [{custom_thresholds['csf_low']}-{custom_thresholds['csf_high']}] | Pixel spacing: {pixel_spacing} mm/px*")
288
- report_lines.append("*Structures:* " + ", ".join(display_masks.keys()))
289
-
290
- report = "\n".join(report_lines)
291
- return annotated, comparison, report
292
-
293
  finally:
294
  os.unlink(temp_path)
295
 
296
 
297
- # ===========================================================================
298
- # Tab 2: YOLO NPH Detection
299
- # ===========================================================================
300
-
301
- def yolo_detect_nph(image, conf_threshold):
302
- """Run YOLO model on a brain scan to detect NPH structures."""
303
- if image is None:
304
- raise gr.Error("Please upload a brain CT or MRI image first.")
305
-
306
  model = _get_yolo_model()
307
  if model is None:
308
- raise gr.Error(
309
- "YOLO model (best.pt) not available. "
310
- "Make sure the model file is in the Space repository."
311
- )
312
 
313
  with tempfile.NamedTemporaryFile(suffix=".png", delete=False) as f:
314
  Image.fromarray(image).save(f.name)
@@ -316,10 +337,9 @@ def yolo_detect_nph(image, conf_threshold):
316
 
317
  try:
318
  results = model(temp_path, verbose=False)[0]
319
-
320
  h, w = image.shape[:2]
321
  annotated_img = image.copy()
322
- boxes_data = []
323
 
324
  for box in results.boxes:
325
  conf = float(box.conf[0])
@@ -330,121 +350,261 @@ def yolo_detect_nph(image, conf_threshold):
330
  cls_name = model.names.get(cls_id, str(cls_id))
331
  color = YOLO_COLORS.get(cls_name, (255, 255, 255))
332
 
333
- boxes_data.append({
334
- "class": cls_name,
335
- "x1": x1, "y1": y1, "x2": x2, "y2": y2,
336
  "confidence": round(conf, 4),
337
  })
338
-
339
- # Draw bounding box
340
  cv2.rectangle(annotated_img, (x1, y1), (x2, y2), color, 2)
341
-
342
- # Label background
343
  label = f"{cls_name} {conf:.0%}"
344
  (lw, lh), _ = cv2.getTextSize(label, cv2.FONT_HERSHEY_SIMPLEX, 0.5, 1)
345
  cv2.rectangle(annotated_img, (x1, y1 - lh - 8), (x1 + lw + 4, y1), color, -1)
346
  cv2.putText(annotated_img, label, (x1 + 2, y1 - 4),
347
  cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 0), 1, cv2.LINE_AA)
348
 
349
- # Compute metrics from detected boxes
350
- metrics = _derive_yolo_metrics(boxes_data, w, h)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
351
 
352
- # Build Gradio annotations for the AnnotatedImage output
353
- annotations = []
354
- for b in boxes_data:
355
- annotations.append((
356
- (b["x1"], b["y1"], b["x2"], b["y2"]),
357
- f"{b['class']} ({b['confidence']:.0%})"
358
- ))
359
 
360
- # Build report
361
- report_lines = ["## YOLO NPH Detection Report\n"]
362
- report_lines.append(f"**Detections:** {len(boxes_data)} structures found at {conf_threshold:.0%} confidence\n")
 
363
 
364
- for b in boxes_data:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
365
  bw = b["x2"] - b["x1"]
366
  bh = b["y2"] - b["y1"]
367
- report_lines.append(f"- **{b['class']}**: {b['confidence']:.1%} confidence, {bw}x{bh} px at ({b['x1']},{b['y1']})")
368
-
369
- report_lines.append(f"\n### Derived Metrics")
370
- ei = metrics.get("evans_index", 0)
371
- ei_status = "ABNORMAL (>0.3)" if ei > 0.3 else "Normal"
372
- report_lines.append(f"**Evans' Index:** {ei:.3f} -- {ei_status}")
373
- report_lines.append(f"**DESH Score:** {metrics.get('desh_score', 0)}/3")
374
- report_lines.append(f"**Sylvian Dilation:** {'Yes' if metrics.get('sylvian_dilation') else 'No'}")
375
- report_lines.append(f"**PVH Detected:** {'Yes' if metrics.get('periventricular_changes') else 'No'}")
376
-
377
- prob = metrics.get("nph_probability", 0)
378
- report_lines.append(f"**NPH Probability:** {prob:.0%}")
379
-
380
- # Auto-compute NPH score from YOLO metrics
381
- score_input = {
382
- "evansIndex": metrics["evans_index"],
383
- "callosalAngle": metrics.get("callosal_angle"),
384
- "deshScore": metrics.get("desh_score", 0),
385
- "sylvianDilation": metrics.get("sylvian_dilation", False),
386
- "vsr": metrics.get("vsr"),
387
- "triad": [],
388
- "corticalAtrophy": metrics.get("cortical_atrophy", "unknown"),
389
- }
390
- nph_score = _compute_nph_score(score_input)
391
 
392
- report_lines.append(f"\n### Clinical NPH Score: **{nph_score['score']}/100** -- {nph_score['label']}")
393
- report_lines.append(f"*{nph_score['recommendation']}*")
394
 
395
- report = "\n".join(report_lines)
 
396
 
397
- return (image, annotations), annotated_img, report
398
 
399
- finally:
400
- os.unlink(temp_path)
401
 
 
 
 
402
 
403
- def _derive_yolo_metrics(boxes, image_width, image_height):
404
- """Compute NPH metrics from YOLO detection boxes."""
405
- ventricle = next((b for b in boxes if b["class"] == "ventricle"), None)
406
- skull = next((b for b in boxes if b["class"] == "skull_inner"), None)
407
 
408
- if ventricle and skull:
409
- vent_w = ventricle["x2"] - ventricle["x1"]
410
- skull_w = skull["x2"] - skull["x1"]
411
- evans_index = round(vent_w / skull_w, 4) if skull_w > 0 else 0.0
412
- elif ventricle:
413
- vent_w = ventricle["x2"] - ventricle["x1"]
414
- evans_index = round(vent_w / image_width, 4) if image_width > 0 else 0.0
415
- else:
416
- evans_index = 0.0
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
417
 
418
- desh_classes = {"tight_convexity", "sylvian_fissure", "pvh"}
419
- detected_desh = {b["class"] for b in boxes if b["class"] in desh_classes}
420
- desh_score = len(detected_desh)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
421
 
422
- sylvian_dilation = any(b["class"] == "sylvian_fissure" for b in boxes)
423
- periventricular_changes = any(b["class"] == "pvh" for b in boxes)
424
 
425
- return {
426
- "evans_index": evans_index,
427
- "callosal_angle": None,
428
- "desh_score": desh_score,
429
- "sylvian_dilation": sylvian_dilation,
430
- "vsr": None,
431
- "periventricular_changes": periventricular_changes,
432
- "cortical_atrophy": "unknown",
433
- "nph_probability": round(sum([evans_index > 0.3, desh_score >= 2, sylvian_dilation]) / 3, 4),
434
- }
 
 
 
 
 
 
 
 
 
 
 
 
435
 
436
 
437
  # ===========================================================================
438
- # Tab 3: NPH Clinical Scoring Calculator
439
  # ===========================================================================
440
 
441
  def compute_clinical_score(
442
- evans_index, callosal_angle_str, desh_score,
443
- sylvian_dilation, vsr_str,
444
- gait, cognition, urinary,
445
- cortical_atrophy
446
  ):
447
- """Interactive NPH clinical scoring calculator."""
448
  callosal = None
449
  if callosal_angle_str and callosal_angle_str.strip():
450
  try:
@@ -460,115 +620,187 @@ def compute_clinical_score(
460
  pass
461
 
462
  triad = [gait, cognition, urinary]
463
-
464
- atrophy_map = {
465
- "None/Mild": "none",
466
- "Moderate": "moderate",
467
- "Significant": "significant",
468
- }
469
 
470
  score_data = {
471
- "evansIndex": evans_index,
472
- "callosalAngle": callosal,
473
- "deshScore": int(desh_score),
474
- "sylvianDilation": sylvian_dilation,
475
- "vsr": vsr,
476
- "triad": triad,
477
  "corticalAtrophy": atrophy_map.get(cortical_atrophy, "unknown"),
478
  }
479
-
480
  result = _compute_nph_score(score_data)
481
 
482
- # Build detailed breakdown
483
- lines = []
484
- lines.append(f"# NPH Score: {result['score']}/100")
485
- lines.append(f"## {result['label']}\n")
486
- lines.append(f"{result['recommendation']}\n")
487
-
488
  lines.append("---\n### Input Summary\n")
489
  lines.append(f"- **Evans' Index:** {evans_index:.3f}" + (" (>0.3 = abnormal)" if evans_index > 0.3 else ""))
490
  if callosal is not None:
491
- lines.append(f"- **Callosal Angle:** {callosal:.1f} deg" + (" (<90 = suggestive)" if callosal < 90 else ""))
492
- else:
493
- lines.append("- **Callosal Angle:** Not provided")
494
  lines.append(f"- **DESH Score:** {int(desh_score)}/3")
495
  lines.append(f"- **Sylvian Dilation:** {'Yes' if sylvian_dilation else 'No'}")
496
  if vsr is not None:
497
  lines.append(f"- **VSR:** {vsr:.2f}" + (" (>2.0 = strong NPH indicator)" if vsr > 2.0 else ""))
498
- else:
499
- lines.append("- **VSR:** Not available")
500
  triad_count = sum(triad)
501
- lines.append(f"- **Hakim Triad:** {triad_count}/3 (Gait: {'Yes' if gait else 'No'}, Cognition: {'Yes' if cognition else 'No'}, Urinary: {'Yes' if urinary else 'No'})")
502
  lines.append(f"- **Cortical Atrophy:** {cortical_atrophy}")
503
 
504
- lines.append("\n---\n### Scoring Weights\n")
505
- if vsr is not None:
506
- lines.append("| Component | Weight | Status |")
507
- lines.append("|---|---|---|")
508
- lines.append(f"| VSR | 40% | {'Contributing' if vsr and vsr > 2.0 else 'Not met'} |")
509
- lines.append(f"| Evans Index | 25% | {'Contributing' if evans_index > 0.3 else 'Not met'} |")
510
- lines.append(f"| Callosal Angle | 20% | {'Contributing' if callosal and callosal < 90 else 'N/A' if callosal is None else 'Not met'} |")
511
- lines.append(f"| DESH Pattern | 10% | {int(desh_score)}/3 |")
512
- lines.append(f"| Sylvian Fissure | 5% | {'Contributing' if sylvian_dilation else 'Not met'} |")
513
- else:
514
- lines.append("*VSR not available -- weights redistributed across remaining criteria.*\n")
515
- lines.append("| Component | Weight (adjusted) | Status |")
516
- lines.append("|---|---|---|")
517
- lines.append(f"| Evans Index | 41.7% | {'Contributing' if evans_index > 0.3 else 'Not met'} |")
518
- lines.append(f"| Callosal Angle | 33.3% | {'Contributing' if callosal and callosal < 90 else 'N/A' if callosal is None else 'Not met'} |")
519
- lines.append(f"| DESH Pattern | 16.7% | {int(desh_score)}/3 |")
520
- lines.append(f"| Sylvian Fissure | 8.3% | {'Contributing' if sylvian_dilation else 'Not met'} |")
521
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
522
  if triad_count >= 2:
523
- lines.append(f"\n**Triad Bonus:** +{15 if triad_count == 3 else 5}% (Hakim triad {'complete' if triad_count == 3 else 'partial'})")
524
- if cortical_atrophy in ("Moderate", "Significant"):
525
- penalty = 30 if cortical_atrophy == "Significant" else 15
526
- lines.append(f"\n**Atrophy Penalty:** -{penalty}% (suggests ex-vacuo component)")
527
 
528
- return "\n".join(lines)
 
 
 
 
 
 
 
 
 
529
 
530
 
531
  # ===========================================================================
532
- # Tabs 4-8: Filters & ML Models
533
  # ===========================================================================
534
 
535
  def apply_filter(image, effect, intensity):
536
  if image is None:
537
  raise gr.Error("Please upload an image first.")
538
  img = Image.fromarray(image)
539
-
540
  if effect == "Grayscale":
541
  filtered = ImageOps.grayscale(img).convert("RGB")
542
- if intensity < 1.0:
543
- filtered = Image.blend(img, filtered, intensity)
544
  elif effect == "Sepia":
545
  gray = ImageOps.grayscale(img)
546
  sepia = ImageOps.colorize(gray, "#704214", "#C0A080")
547
  filtered = Image.blend(img, sepia, intensity)
548
  elif effect == "Blur":
549
- radius = int(intensity * 10)
550
- filtered = img.filter(ImageFilter.GaussianBlur(radius=max(1, radius)))
551
  elif effect == "Sharpen":
552
- enhancer = ImageEnhance.Sharpness(img)
553
- filtered = enhancer.enhance(1 + intensity * 4)
554
  elif effect == "Edge Detect":
555
- edges = img.filter(ImageFilter.FIND_EDGES)
556
- filtered = Image.blend(img, edges, intensity)
557
- elif effect == "Emboss":
558
- embossed = img.filter(ImageFilter.EMBOSS)
559
- filtered = Image.blend(img, embossed, intensity)
560
  elif effect == "Invert":
561
- inverted = ImageOps.invert(img.convert("RGB"))
562
- filtered = Image.blend(img, inverted, intensity)
563
- elif effect == "Posterize":
564
- bits = max(1, int(8 - intensity * 6))
565
- filtered = ImageOps.posterize(img.convert("RGB"), bits)
566
  elif effect == "Brightness":
567
- enhancer = ImageEnhance.Brightness(img)
568
- filtered = enhancer.enhance(0.5 + intensity * 1.5)
569
  elif effect == "Contrast":
570
- enhancer = ImageEnhance.Contrast(img)
571
- filtered = enhancer.enhance(0.5 + intensity * 2)
572
  else:
573
  filtered = img
574
  return np.array(filtered)
@@ -577,212 +809,137 @@ def apply_filter(image, effect, intensity):
577
  def classify_image(image):
578
  if image is None:
579
  raise gr.Error("Please upload an image first.")
580
- results = classifier(Image.fromarray(image))
581
- return {r["label"]: r["score"] for r in results}
582
 
583
  def detect_objects(image, threshold):
584
  if image is None:
585
  raise gr.Error("Please upload an image first.")
586
- results = detector(Image.fromarray(image), threshold=threshold)
587
- annotations = []
588
- for r in results:
589
- box = r["box"]
590
- annotations.append((
591
- (box["xmin"], box["ymin"], box["xmax"], box["ymax"]),
592
- f"{r['label']} ({r['score']:.0%})"
593
- ))
594
- return (image, annotations)
595
 
596
  def segment_image(image):
597
  if image is None:
598
  raise gr.Error("Please upload an image first.")
599
- results = segmenter(Image.fromarray(image))
600
- annotations = []
601
- for r in results:
602
- mask = np.array(r["mask"])
603
- annotations.append((mask, r["label"]))
604
- return (image, annotations)
605
 
606
 
607
  # ===========================================================================
608
  # Build the UI
609
  # ===========================================================================
610
 
611
- css = """
612
- .main-title { text-align: center; margin-bottom: 0.5em; }
613
- .subtitle { text-align: center; color: #666; margin-top: 0; }
614
- .nph-ref-table th, .nph-ref-table td { padding: 4px 12px; }
 
615
  """
616
 
617
- with gr.Blocks(theme=gr.themes.Soft(), css=css) as demo:
618
- gr.Markdown("# Image Processing Studio", elem_classes="main-title")
619
  gr.Markdown(
620
- "Filters, classification, object detection, panoptic segmentation, **YOLO NPH detection**, "
621
- "clinical NPH scoring, and **intensity-based NPH analysis** -- all in one place.",
622
  elem_classes="subtitle"
623
  )
624
 
625
- # ── Tab 1: NPH Analysis (Intensity-based) ──
626
- with gr.Tab("NPH Analysis"):
627
  gr.Markdown(
628
- "### Normal Pressure Hydrocephalus -- Segmentation & Biomarkers\n"
629
- "Upload a brain MRI or CT scan. Computes Evans' index, DESH pattern, temporal horn width, "
630
- "callosal angle (coronal), PVH scoring (FLAIR), and ventricle/brain ratio.\n\n"
631
- "**Sensitivity slider** adjusts the CSF thresholds -- increase to capture more ventricle, "
632
- "decrease to be more conservative."
633
  )
634
  with gr.Row():
635
  with gr.Column(scale=1):
636
- nph_input = gr.Image(label="Upload Brain Scan", type="numpy")
637
- nph_modality = gr.Dropdown(
638
  choices=["Axial FLAIR", "Axial T1", "Axial T2", "Coronal T2",
639
  "Axial T2 FFE", "Sagittal T1", "CT Head"],
640
- value="Axial FLAIR",
641
- label="Modality / Sequence"
642
- )
643
- nph_sensitivity = gr.Slider(
644
- minimum=10, maximum=90, value=50, step=5,
645
- label="Sensitivity (%)"
646
- )
647
- nph_alpha = gr.Slider(
648
- minimum=0.1, maximum=0.9, value=0.45, step=0.05,
649
- label="Overlay Opacity"
650
  )
651
- nph_spacing = gr.Textbox(
652
- label="Pixel Spacing (mm/px)",
653
- placeholder="e.g. 0.5 (leave blank for auto-estimate)",
654
- value=""
655
- )
656
- nph_btn = gr.Button("Analyze for NPH", variant="primary", size="lg")
657
 
658
  with gr.Column(scale=2):
659
- nph_overlay = gr.Image(label="Segmentation Overlay", type="numpy")
660
- nph_comparison = gr.Image(label="Side-by-Side Comparison", type="numpy")
661
-
662
- nph_report = gr.Markdown(label="Biomarker Report")
663
-
664
- nph_btn.click(
665
- fn=analyze_nph,
666
- inputs=[nph_input, nph_modality, nph_sensitivity, nph_alpha, nph_spacing],
667
- outputs=[nph_overlay, nph_comparison, nph_report]
 
 
 
 
 
668
  )
669
 
670
- with gr.Accordion("NPH Reference Values & Interpretation Guide", open=False):
671
  gr.Markdown(
672
- "| Biomarker | Normal | Suggestive of NPH | Strongly suggestive |\n"
673
- "|---|---|---|---|\n"
674
- "| Evans' Index | < 0.3 | > 0.3 | > 0.33 |\n"
675
- "| Callosal Angle | > 120 deg | < 90 deg | < 60 deg |\n"
676
- "| Temporal Horn | < 2 mm | 2-5 mm | > 5 mm |\n"
677
- "| Third Ventricle | < 5 mm | 5-10 mm | > 10 mm |\n"
678
- "| PVH (FLAIR) | Grade 0 | Grade 2 | Grade 3 |\n"
679
- "| DESH Pattern | Absent | -- | Present |\n\n"
680
- "**DESH** (Disproportionately Enlarged Subarachnoid-space Hydrocephalus): "
681
- "Enlarged sylvian fissures + tight high-convexity sulci + ventriculomegaly. "
682
- "This pattern distinguishes iNPH from Alzheimer's and normal aging.\n\n"
683
- "**Color Legend:** "
684
- "Blue = Ventricles | Green = Parenchyma | Yellow = PVH | "
685
- "Purple = Sylvian fissures | Orange = High-convexity sulci\n\n"
686
- "*Note: Measurements from JPEG/PNG images without DICOM metadata are approximate. "
687
- "For clinical use, provide pixel spacing from the DICOM header.*"
688
  )
689
 
690
- # ── Tab 2: YOLO NPH Detection ──
691
- with gr.Tab("YOLO NPH Detection"):
692
  gr.Markdown(
693
- "### Deep Learning NPH Structure Detection\n"
694
- "Uses a trained YOLOv8 model to detect NPH-related structures on brain CT/MRI slices: "
695
- "**ventricle**, **sylvian fissure**, **tight convexity**, **PVH**, and **skull inner boundary**.\n\n"
696
- "The model outputs bounding boxes with confidence scores, computes Evans' Index from "
697
- "detected structures, and generates an overall NPH clinical score."
698
  )
699
  with gr.Row():
700
- with gr.Column(scale=1):
701
- yolo_input = gr.Image(label="Upload Brain Scan", type="numpy")
702
- yolo_conf = gr.Slider(
703
- minimum=0.1, maximum=0.95, value=0.25, step=0.05,
704
- label="Confidence Threshold"
705
  )
706
- yolo_btn = gr.Button("Detect NPH Structures", variant="primary", size="lg")
707
-
708
- with gr.Column(scale=2):
709
- yolo_annotated = gr.AnnotatedImage(label="Detected Structures")
710
- yolo_overlay = gr.Image(label="Annotated Image", type="numpy")
711
-
712
- yolo_report = gr.Markdown(label="YOLO Detection Report")
713
-
714
- yolo_btn.click(
715
- fn=yolo_detect_nph,
716
- inputs=[yolo_input, yolo_conf],
717
- outputs=[yolo_annotated, yolo_overlay, yolo_report]
718
- )
719
 
720
- with gr.Accordion("YOLO Model Details", open=False):
721
- gr.Markdown(
722
- "**Model:** YOLOv8 fine-tuned on NPH brain CT/MRI dataset\n\n"
723
- "**Detected Classes:**\n\n"
724
- "| Class | Description | Color |\n"
725
- "|---|---|---|\n"
726
- "| ventricle | Lateral ventricles | Blue |\n"
727
- "| sylvian_fissure | Sylvian fissures (bilateral) | Purple |\n"
728
- "| tight_convexity | Tight high-convexity sulci | Orange |\n"
729
- "| pvh | Periventricular hyperintensities | Yellow |\n"
730
- "| skull_inner | Inner skull boundary | Gray |\n\n"
731
- "**Evans' Index** is computed from the ventricle and skull inner boundary boxes. "
732
- "If no skull boundary is detected, the image width is used as fallback.\n\n"
733
- "**NPH Score** is computed using the weighted formula: "
734
- "VSR (40%) + Evans Index (25%) + Callosal Angle (20%) + DESH (10%) + Sylvian (5%), "
735
- "with bonuses for Hakim triad and penalties for cortical atrophy."
736
- )
737
 
738
- # ── Tab 3: NPH Clinical Scoring Calculator ──
739
  with gr.Tab("NPH Score Calculator"):
740
  gr.Markdown(
741
  "### Clinical NPH Scoring Calculator\n"
742
- "Enter imaging biomarkers and clinical findings to compute a weighted NPH probability score.\n\n"
743
- "This calculator uses the same scoring formula as the YOLO detection tab but lets you "
744
- "input values manually -- useful for combining measurements from different imaging studies."
745
  )
746
  with gr.Row():
747
  with gr.Column():
748
  gr.Markdown("#### Imaging Biomarkers")
749
- calc_evans = gr.Slider(
750
- minimum=0.0, maximum=0.6, value=0.30, step=0.01,
751
- label="Evans' Index"
752
- )
753
- calc_callosal = gr.Textbox(
754
- label="Callosal Angle (degrees)",
755
- placeholder="e.g. 85 (leave blank if not measured)",
756
- value=""
757
- )
758
- calc_desh = gr.Slider(
759
- minimum=0, maximum=3, value=0, step=1,
760
- label="DESH Score (0-3)"
761
- )
762
  calc_sylvian = gr.Checkbox(label="Sylvian Fissure Dilation", value=False)
763
- calc_vsr = gr.Textbox(
764
- label="VSR (Ventricle-to-SAS Ratio)",
765
- placeholder="e.g. 2.5 (leave blank if not measured)",
766
- value=""
767
- )
768
-
769
  with gr.Column():
770
  gr.Markdown("#### Clinical Findings (Hakim Triad)")
771
  calc_gait = gr.Checkbox(label="Gait disturbance", value=False)
772
  calc_cognition = gr.Checkbox(label="Cognitive impairment", value=False)
773
  calc_urinary = gr.Checkbox(label="Urinary incontinence", value=False)
774
-
775
  gr.Markdown("#### Modifiers")
776
- calc_atrophy = gr.Radio(
777
- choices=["None/Mild", "Moderate", "Significant"],
778
- value="None/Mild",
779
- label="Cortical Atrophy"
780
- )
781
-
782
  calc_btn = gr.Button("Calculate NPH Score", variant="primary", size="lg")
783
 
784
- calc_report = gr.Markdown(label="NPH Score Report")
785
-
786
  calc_btn.click(
787
  fn=compute_clinical_score,
788
  inputs=[calc_evans, calc_callosal, calc_desh, calc_sylvian, calc_vsr,
@@ -790,13 +947,48 @@ with gr.Blocks(theme=gr.themes.Soft(), css=css) as demo:
790
  outputs=calc_report
791
  )
792
 
793
- # ── Tab 4: Client-Side NPH Detector ──
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
794
  with gr.Tab("NPH Detector (Browser)"):
795
  gr.Markdown(
796
- "### Client-Side NPH Detector\n"
797
- "This tab runs the full NPH segmentation pipeline **entirely in your browser** using JavaScript. "
798
- "No data is sent to any server -- everything stays on your device.\n\n"
799
- "Upload a brain scan below and select the modality."
800
  )
801
  gr.HTML(
802
  value='<iframe src="https://mmrech-nph-detector-js.hf.space" '
@@ -805,35 +997,27 @@ with gr.Blocks(theme=gr.themes.Soft(), css=css) as demo:
805
  'style="border-radius: 12px; border: 1px solid #333;"></iframe>',
806
  )
807
 
808
- # ── Tab 5: Video Demo ──
809
  with gr.Tab("Video Demo"):
810
- gr.Markdown(
811
- "### Whole-Brain Segmentation Demo\n"
812
- "Watch a slice-by-slice ventricle segmentation across a full MRI series."
813
- )
814
- gr.Video(
815
- value="examples/hydromorph_whole_brain_segmentation.mp4",
816
- label="NPH Segmentation Video",
817
- autoplay=False,
818
- )
819
 
820
- # ── Tab 6: Filters ──
821
  with gr.Tab("Filters & Effects"):
822
  with gr.Row():
823
  with gr.Column():
824
  filter_input = gr.Image(label="Upload Image", type="numpy")
825
  filter_effect = gr.Dropdown(
826
- choices=["Grayscale", "Sepia", "Blur", "Sharpen", "Edge Detect",
827
- "Emboss", "Invert", "Posterize", "Brightness", "Contrast"],
828
  value="Sepia", label="Effect"
829
  )
830
- filter_intensity = gr.Slider(minimum=0.0, maximum=1.0, value=0.7, step=0.05, label="Intensity")
831
  filter_btn = gr.Button("Apply Filter", variant="primary")
832
  with gr.Column():
833
  filter_output = gr.Image(label="Result", type="numpy")
834
  filter_btn.click(fn=apply_filter, inputs=[filter_input, filter_effect, filter_intensity], outputs=filter_output)
835
 
836
- # ── Tab 7: Classification ──
837
  with gr.Tab("Image Classification"):
838
  with gr.Row():
839
  with gr.Column():
@@ -843,18 +1027,18 @@ with gr.Blocks(theme=gr.themes.Soft(), css=css) as demo:
843
  cls_output = gr.Label(label="Predictions", num_top_classes=5)
844
  cls_btn.click(fn=classify_image, inputs=cls_input, outputs=cls_output)
845
 
846
- # ── Tab 8: Object Detection ──
847
  with gr.Tab("Object Detection"):
848
  with gr.Row():
849
  with gr.Column():
850
  det_input = gr.Image(label="Upload Image", type="numpy")
851
- det_threshold = gr.Slider(minimum=0.1, maximum=0.95, value=0.5, step=0.05, label="Confidence Threshold")
852
  det_btn = gr.Button("Detect Objects", variant="primary")
853
  with gr.Column():
854
  det_output = gr.AnnotatedImage(label="Detections")
855
  det_btn.click(fn=detect_objects, inputs=[det_input, det_threshold], outputs=det_output)
856
 
857
- # ── Tab 9: Segmentation ──
858
  with gr.Tab("Segmentation"):
859
  with gr.Row():
860
  with gr.Column():
@@ -864,4 +1048,11 @@ with gr.Blocks(theme=gr.themes.Soft(), css=css) as demo:
864
  seg_output = gr.AnnotatedImage(label="Segmentation Map")
865
  seg_btn.click(fn=segment_image, inputs=seg_input, outputs=seg_output)
866
 
 
 
 
 
 
 
 
867
  demo.launch()
 
1
  """
2
+ NPH Diagnostic Platform v3.0
3
+ Unified Gradio app combining intensity segmentation, YOLO detection,
4
+ dual-engine comparison, ensemble scoring, clinical calculator,
5
+ multi-slice batch analysis, quality assessment, and report generation.
6
+
7
+ Author: Matheus Rech, MD
8
  """
9
 
10
  import gradio as gr
11
  import numpy as np
12
+ from PIL import Image, ImageFilter, ImageEnhance, ImageOps, ImageDraw
13
  from transformers import pipeline
14
  import cv2
15
  import tempfile
16
  import os
17
  import threading
18
  import logging
19
+ import time
20
+ import json
21
+ from datetime import datetime
22
 
23
  from segment_neuroimaging import (
24
  segment_nph, segment_ventricles, compute_evans_index,
 
33
  logging.basicConfig(level=logging.INFO)
34
  logger = logging.getLogger(__name__)
35
 
36
+ # ---- ML models (lazy-loaded) ----
37
+ _classifier = None
38
+ _detector = None
39
+ _segmenter = None
40
+
41
+ def get_classifier():
42
+ global _classifier
43
+ if _classifier is None:
44
+ _classifier = pipeline("image-classification", model="google/vit-base-patch16-224")
45
+ return _classifier
46
+
47
+ def get_detector():
48
+ global _detector
49
+ if _detector is None:
50
+ _detector = pipeline("object-detection", model="facebook/detr-resnet-50")
51
+ return _detector
52
 
53
+ def get_segmenter():
54
+ global _segmenter
55
+ if _segmenter is None:
56
+ _segmenter = pipeline("image-segmentation", model="facebook/detr-resnet-50-panoptic")
57
+ return _segmenter
58
+
59
+
60
+ # ---- YOLO model ----
61
  YOLO_MODEL_PATH = "best.pt"
62
  _yolo_model = None
63
  _yolo_lock = threading.Lock()
64
 
 
65
  YOLO_COLORS = {
66
+ "ventricle": (0, 150, 255),
67
+ "sylvian_fissure": (200, 100, 255),
68
+ "tight_convexity": (255, 150, 100),
69
+ "pvh": (255, 200, 0),
70
+ "skull_inner": (200, 200, 200),
71
+ }
72
+
73
+ YOLO_COLOR_HEX = {
74
+ "ventricle": "#0096FF",
75
+ "sylvian_fissure": "#C864FF",
76
+ "tight_convexity": "#FF9664",
77
+ "pvh": "#FFC800",
78
+ "skull_inner": "#C8C8C8",
79
  }
80
 
81
 
82
  def _get_yolo_model():
 
83
  global _yolo_model
84
  if _yolo_model is None:
85
  with _yolo_lock:
 
93
  return _yolo_model
94
 
95
 
96
+ # ===========================================================================
97
+ # Shared: NPH scoring
98
+ # ===========================================================================
99
+
100
  def _compute_nph_score(data: dict) -> dict:
101
+ """Weighted NPH scoring: VSR(40%) + EI(25%) + CA(20%) + DESH(10%) + Sylvian(5%)."""
 
 
 
 
102
  score = 0.0
103
  evans = data.get("evansIndex") or 0.0
104
  callosal = data.get("callosalAngle")
 
122
  if sylvian:
123
  score += 5
124
  else:
 
125
  scale = 100 / 60
126
  if evans > 0.3:
127
  score += 25 * scale * min((evans - 0.3) / 0.15, 1)
 
145
  score = int(round(min(score, 100)))
146
 
147
  if score >= 75:
148
+ label, color = "Probable NPH", "#ef4444"
149
+ rec = "Strongly consider CSF tap test and neurosurgical referral for VP shunt evaluation."
150
  elif score >= 50:
151
+ label, color = "Possible NPH", "#f59e0b"
152
+ rec = "CSF tap test recommended. Consider supplementary MRI for DESH confirmation."
153
  elif score >= 30:
154
+ label, color = "Low Suspicion", "#3b82f6"
155
+ rec = "NPH less likely. Consider alternative diagnoses. Follow-up imaging in 6 months."
156
  else:
157
+ label, color = "Unlikely NPH", "#6b7280"
158
+ rec = "Ventriculomegaly likely ex-vacuo or other etiology. Investigate alternative causes."
159
 
160
+ return {"score": score, "label": label, "color": color, "recommendation": rec}
161
 
162
 
163
  # ===========================================================================
164
+ # Shared: Image quality assessment
165
  # ===========================================================================
166
 
167
def assess_quality(gray):
    """Return a quality dict: sharpness, contrast, noise, overall grade.

    Sharpness is the variance of the Laplacian response, contrast is the
    global intensity standard deviation, and noise follows an
    Immerkaer-style second-difference estimate.  Each raw measurement is
    mapped onto a 0-100 sub-score and blended into a weighted overall
    score (40% sharpness, 35% contrast, 25% noise).

    NOTE(review): the scalings (/5, *2, *10) and grade cut-offs look
    empirically tuned -- confirm against representative scans.
    """
    rows, cols = gray.shape[0], gray.shape[1]

    # Sharpness proxy: blur suppresses high frequencies, so the Laplacian
    # response variance drops on soft images.
    lap_variance = cv2.Laplacian(gray, cv2.CV_64F).var()

    # Contrast proxy: plain standard deviation of pixel intensities.
    std_dev = float(gray.std())

    # Noise proxy: summed absolute response of a 3x3 second-difference
    # operator, normalised over the interior pixel count.  Skipped for
    # degenerate images that have no interior.
    noise_sigma = 0
    if rows > 3 and cols > 3:
        second_diff = np.array([[1, -2, 1], [-2, 4, -2], [1, -2, 1]])
        response = np.abs(cv2.filter2D(gray.astype(np.float64), -1, second_diff)).sum()
        noise_sigma = response * np.sqrt(0.5 * np.pi) / (6 * (rows - 2) * (cols - 2))

    # Clamp each measurement into a 0-100 sub-score.
    sharpness_score = min(100, lap_variance / 5)
    contrast_score = min(100, std_dev * 2)
    noise_score = max(0, 100 - noise_sigma * 10)
    overall = sharpness_score * 0.4 + contrast_score * 0.35 + noise_score * 0.25

    # Letter grade from the weighted overall score.
    grade = "Good" if overall >= 70 else ("Acceptable" if overall >= 40 else "Poor")

    return {
        "sharpness": round(sharpness_score, 1),
        "contrast": round(contrast_score, 1),
        "noise": round(noise_score, 1),
        "overall": round(overall, 1),
        "grade": grade,
    }
196
 
197
+
198
def compute_symmetry_score(mask):
    """Score left-right symmetry of a binary mask (0-100).

    Compares the left half of the mask with the horizontally mirrored
    right half and returns their IoU (Jaccard index) as a percentage,
    rounded to one decimal place.

    Args:
        mask: 2-D array; nonzero pixels are treated as foreground.

    Returns:
        float in [0, 100]; 0.0 when both halves are empty.
    """
    h, w = mask.shape[:2]
    mid = w // 2
    left = mask[:, :mid]
    # Mirror the *last* `mid` columns so column 0 pairs with column w-1,
    # column 1 with w-2, and so on.  (The previous mask[:, mid:mid+mid]
    # slice was shifted by one column on odd-width images: it included
    # the center column and dropped the true rightmost column, breaking
    # the mirror correspondence.  For even widths the two slices are
    # identical.)  Both halves now always have `mid` columns.
    right = np.fliplr(mask[:, w - mid:])
    intersection = np.logical_and(left > 0, right > 0).sum()
    union = np.logical_or(left > 0, right > 0).sum()
    if union == 0:
        return 0.0
    return round(intersection / union * 100, 1)
213
+
214
+
215
+ # ===========================================================================
216
+ # Tab 1: Dual-Engine NPH Analysis (the main innovation)
217
+ # ===========================================================================
218
+
219
+ def _run_intensity_engine(image, modality, sensitivity, pixel_spacing):
220
+ """Run the intensity-based segmentation engine."""
221
  with tempfile.NamedTemporaryFile(suffix=".png", delete=False) as f:
222
  Image.fromarray(image).save(f.name)
223
  temp_path = f.name
224
 
225
  try:
226
  modality_map = {
227
+ "Axial FLAIR": "FLAIR", "Axial T1": "T1", "Axial T2": "T2",
228
+ "Coronal T2": "T2", "Axial T2 FFE": "T2", "Sagittal T1": "T1",
 
 
 
 
229
  "CT Head": "CT_HEAD",
230
  }
231
  mod_key = modality_map.get(modality, "T1")
232
  mod = Modality[mod_key]
233
  is_coronal = "Coronal" in modality
234
 
 
 
 
 
 
 
 
235
  img_rgb, gray, _ = preprocess_image(temp_path)
236
  h, w = gray.shape[:2]
237
  blurred = cv2.GaussianBlur(gray, (5, 5), 0)
 
239
 
240
  orig_thresh = dict(VENTRICLE_THRESHOLDS[mod])
241
  sens_adj = (sensitivity - 50) / 50.0
 
242
  custom_thresholds = dict(orig_thresh)
243
  if CSF_MODE[mod] == CSFAppearance.DARK:
244
+ custom_thresholds["csf_high"] = max(20, min(120, int(orig_thresh["csf_high"] + sens_adj * 30)))
 
245
  else:
246
+ custom_thresholds["csf_low"] = max(100, min(220, int(orig_thresh["csf_low"] - sens_adj * 30)))
 
247
 
248
  vent_mask = segment_ventricles(gray, mod, roi_mask, custom_thresholds=custom_thresholds)
249
 
 
254
  th_data = compute_temporal_horn_width(vent_mask, pixel_spacing)
255
  tv_data = compute_third_ventricle_width(vent_mask, pixel_spacing)
256
  desh_data = assess_desh(vent_mask, gray, roi_mask, mod, pixel_spacing)
257
+ pvh_data = score_pvh(gray, vent_mask) if mod == Modality.FLAIR else None
 
 
 
 
258
  ca_data = compute_callosal_angle(vent_mask) if is_coronal else {}
259
 
260
  vent_area = int((vent_mask > 0).sum())
261
  brain_area = int((roi_mask > 0).sum())
262
+ vb_ratio = round(vent_area / brain_area, 4) if brain_area > 0 else 0
263
 
264
+ quality = assess_quality(gray)
265
+ symmetry = compute_symmetry_score(vent_mask)
266
+
267
+ # Build overlay
268
  display_masks = {"ventricles": vent_mask}
269
  parenchyma = cv2.bitwise_and(roi_mask, cv2.bitwise_not(vent_mask))
270
  display_masks["parenchyma"] = parenchyma
 
271
  if pvh_data and mod == Modality.FLAIR:
272
  display_masks["pvh"] = pvh_data["pvh_mask"]
273
  if "sylvian_mask" in desh_data:
 
275
  if "convexity_mask" in desh_data:
276
  display_masks["high_convexity_sulci"] = desh_data["convexity_mask"]
277
 
278
+ overlay = create_overlay(img_rgb, display_masks, alpha=0.45)
279
+ biomarkers = dict(ei_data)
280
+ biomarkers.update(th_data)
 
281
  if pvh_data:
282
+ biomarkers["pvh_grade"] = pvh_data["pvh_grade"]
283
+ biomarkers["is_desh_positive"] = desh_data["is_desh_positive"]
284
  if ca_data.get("callosal_angle_deg") is not None:
285
+ biomarkers["callosal_angle_deg"] = ca_data["callosal_angle_deg"]
286
 
287
+ annotated = add_annotations(overlay, display_masks, f"{modality} -- Intensity Engine", biomarkers)
 
 
 
 
288
 
289
+ # Draw Evans' index line
290
  row = ei_data.get("measurement_row", 0)
291
  if row > 0:
292
  cols = np.where(vent_mask[row, :] > 0)[0]
 
294
  minX, maxX = int(cols[0]), int(cols[-1])
295
  cv2.line(annotated, (minX, row), (maxX, row), (255, 220, 0), 2)
296
  skull_d = ei_data.get("skull_diameter_px", w)
297
+ cx = w // 2
298
+ hs = skull_d // 2
299
+ cv2.line(annotated, (cx - hs, row + 8), (cx + hs, row + 8), (200, 200, 200), 1)
300
+
301
+ return {
302
+ "annotated": annotated,
303
+ "evans_index": ei_data.get("evans_index", 0),
304
+ "frontal_horn_mm": ei_data.get("frontal_horn_width_mm"),
305
+ "skull_diameter_mm": ei_data.get("skull_diameter_mm"),
306
+ "temporal_horn_px": th_data.get("temporal_horn_width_px", 0),
307
+ "temporal_horn_mm": th_data.get("temporal_horn_width_mm"),
308
+ "third_ventricle_px": tv_data.get("third_ventricle_width_px", 0),
309
+ "third_ventricle_mm": tv_data.get("third_ventricle_width_mm"),
310
+ "vb_ratio": vb_ratio,
311
+ "vent_area": vent_area,
312
+ "brain_area": brain_area,
313
+ "desh_positive": desh_data.get("is_desh_positive", False),
314
+ "desh_score": desh_data.get("total_score", 0),
315
+ "desh_ventriculomegaly": desh_data.get("ventriculomegaly_score", 0),
316
+ "desh_sylvian": desh_data.get("sylvian_dilation_score", 0),
317
+ "desh_convexity": desh_data.get("convexity_tightness_score", 0),
318
+ "pvh_grade": pvh_data["pvh_grade"] if pvh_data else None,
319
+ "pvh_ratio": pvh_data["pvh_ratio"] if pvh_data else None,
320
+ "callosal_angle": ca_data.get("callosal_angle_deg"),
321
+ "quality": quality,
322
+ "symmetry": symmetry,
323
+ }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
324
  finally:
325
  os.unlink(temp_path)
326
 
327
 
328
+ def _run_yolo_engine(image, conf_threshold=0.25):
329
+ """Run the YOLO detection engine."""
 
 
 
 
 
 
 
330
  model = _get_yolo_model()
331
  if model is None:
332
+ return None
 
 
 
333
 
334
  with tempfile.NamedTemporaryFile(suffix=".png", delete=False) as f:
335
  Image.fromarray(image).save(f.name)
 
337
 
338
  try:
339
  results = model(temp_path, verbose=False)[0]
 
340
  h, w = image.shape[:2]
341
  annotated_img = image.copy()
342
+ boxes = []
343
 
344
  for box in results.boxes:
345
  conf = float(box.conf[0])
 
350
  cls_name = model.names.get(cls_id, str(cls_id))
351
  color = YOLO_COLORS.get(cls_name, (255, 255, 255))
352
 
353
+ boxes.append({
354
+ "class": cls_name, "x1": x1, "y1": y1, "x2": x2, "y2": y2,
 
355
  "confidence": round(conf, 4),
356
  })
 
 
357
  cv2.rectangle(annotated_img, (x1, y1), (x2, y2), color, 2)
 
 
358
  label = f"{cls_name} {conf:.0%}"
359
  (lw, lh), _ = cv2.getTextSize(label, cv2.FONT_HERSHEY_SIMPLEX, 0.5, 1)
360
  cv2.rectangle(annotated_img, (x1, y1 - lh - 8), (x1 + lw + 4, y1), color, -1)
361
  cv2.putText(annotated_img, label, (x1 + 2, y1 - 4),
362
  cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 0), 1, cv2.LINE_AA)
363
 
364
+ # Derive metrics
365
+ ventricle = next((b for b in boxes if b["class"] == "ventricle"), None)
366
+ skull = next((b for b in boxes if b["class"] == "skull_inner"), None)
367
+
368
+ if ventricle and skull:
369
+ vent_w = ventricle["x2"] - ventricle["x1"]
370
+ skull_w = skull["x2"] - skull["x1"]
371
+ ei = round(vent_w / skull_w, 4) if skull_w > 0 else 0
372
+ elif ventricle:
373
+ ei = round((ventricle["x2"] - ventricle["x1"]) / w, 4)
374
+ else:
375
+ ei = 0
376
+
377
+ desh_classes = {"tight_convexity", "sylvian_fissure", "pvh"}
378
+ detected_desh = {b["class"] for b in boxes if b["class"] in desh_classes}
379
+ sylvian = any(b["class"] == "sylvian_fissure" for b in boxes)
380
+ pvh = any(b["class"] == "pvh" for b in boxes)
381
+
382
+ return {
383
+ "annotated": annotated_img,
384
+ "boxes": boxes,
385
+ "evans_index": ei,
386
+ "desh_score": len(detected_desh),
387
+ "sylvian_dilation": sylvian,
388
+ "pvh_detected": pvh,
389
+ "n_detections": len(boxes),
390
+ }
391
+ finally:
392
+ os.unlink(temp_path)
393
 
 
 
 
 
 
 
 
394
 
395
def dual_engine_analyze(image, modality, sensitivity, pixel_spacing_str, yolo_conf):
    """Run BOTH engines and produce comparison + ensemble score.

    Runs the intensity-based segmentation engine and the YOLO detector on
    the same input, blends their Evans' Index estimates (weighted 60%
    intensity / 40% YOLO), merges the DESH evidence, scores the result
    with the shared ``_compute_nph_score`` formula, and renders a
    Markdown comparison report.

    Args:
        image: RGB numpy array from the Gradio Image component.
        modality: UI modality label (e.g. "Axial FLAIR", "CT Head").
        sensitivity: segmentation sensitivity slider value.
        pixel_spacing_str: optional mm-per-pixel as free text; blank or
            unparsable input is silently treated as "unknown spacing".
        yolo_conf: YOLO confidence threshold.

    Returns:
        Tuple of (intensity-annotated image, YOLO-annotated image or a
        black placeholder when the YOLO model is unavailable, report str).

    Raises:
        gr.Error: if no image was uploaded.
    """
    if image is None:
        raise gr.Error("Please upload a brain MRI or CT image first.")

    # Parse the free-text pixel spacing; malformed input silently falls
    # back to None (pixel-only measurements downstream).
    pixel_spacing = None
    if pixel_spacing_str and pixel_spacing_str.strip():
        try:
            pixel_spacing = float(pixel_spacing_str.strip())
        except ValueError:
            pass

    # Run intensity engine (timed so the report can show per-engine cost)
    t0 = time.time()
    intensity = _run_intensity_engine(image, modality, sensitivity, pixel_spacing)
    t_intensity = round(time.time() - t0, 2)

    # Run YOLO engine -- returns None when the model file is unavailable
    t0 = time.time()
    yolo = _run_yolo_engine(image, yolo_conf)
    t_yolo = round(time.time() - t0, 2)

    # Ensemble: average the Evans' Index from both engines
    # (60/40 in favour of the intensity engine; intensity-only fallback)
    ei_intensity = intensity["evans_index"]
    ei_yolo = yolo["evans_index"] if yolo else 0
    if yolo:
        ei_ensemble = round((ei_intensity * 0.6 + ei_yolo * 0.4), 4)
    else:
        ei_ensemble = ei_intensity

    # DESH ensemble -- takes the more pessimistic (higher) of the two
    # scores.  NOTE(review): the engines use different scales (intensity
    # /6 vs YOLO /3), so a plain max mixes units; confirm this is intended.
    desh_intensity_score = intensity["desh_score"]
    desh_yolo_score = yolo["desh_score"] if yolo else 0
    desh_ensemble = max(desh_intensity_score, desh_yolo_score)
    # NOTE(review): when yolo is None this expression can evaluate to None
    # rather than False; downstream scoring only truth-tests it, so the
    # behavior is unchanged, but the value is not strictly boolean.
    sylvian_ensemble = intensity["desh_sylvian"] > 0 or (yolo and yolo["sylvian_dilation"])

    # Compute ensemble NPH score
    score_input = {
        "evansIndex": ei_ensemble,
        "callosalAngle": intensity.get("callosal_angle"),
        "deshScore": desh_ensemble,
        "sylvianDilation": sylvian_ensemble,
        "vsr": None,
        "triad": [],
        "corticalAtrophy": "unknown",
    }
    nph_result = _compute_nph_score(score_input)

    # Build report (Markdown, rendered by a gr.Markdown component)
    q = intensity["quality"]
    lines = []
    lines.append("# Dual-Engine NPH Analysis Report\n")
    lines.append(f"**Date:** {datetime.now().strftime('%Y-%m-%d %H:%M')} | **Modality:** {modality}\n")

    lines.append("---\n## Image Quality Assessment\n")
    lines.append(f"| Metric | Score |")
    lines.append(f"|---|---|")
    lines.append(f"| Sharpness | {q['sharpness']}/100 |")
    lines.append(f"| Contrast | {q['contrast']}/100 |")
    lines.append(f"| Noise | {q['noise']}/100 |")
    lines.append(f"| **Overall** | **{q['overall']}/100 ({q['grade']})** |")
    lines.append(f"| Symmetry | {intensity['symmetry']}% |")

    lines.append("\n---\n## Engine Comparison\n")
    lines.append("| Metric | Intensity Engine | YOLO Engine | Ensemble |")
    lines.append("|---|---|---|---|")
    # 0.3 is the conventional Evans' Index abnormality cut-off used
    # throughout this module.
    ei_i_status = "abnormal" if ei_intensity > 0.3 else "normal"
    ei_y_status = ("abnormal" if ei_yolo > 0.3 else "normal") if yolo else "N/A"
    ei_e_status = "ABNORMAL" if ei_ensemble > 0.3 else "normal"
    lines.append(f"| Evans' Index | {ei_intensity:.3f} ({ei_i_status}) | {ei_yolo:.3f} ({ei_y_status}) | **{ei_ensemble:.3f} ({ei_e_status})** |")
    lines.append(f"| DESH Score | {desh_intensity_score}/6 | {desh_yolo_score}/3 | {desh_ensemble} (max) |")

    desh_pos_i = "Yes" if intensity["desh_positive"] else "No"
    # YOLO-side positivity: at least 2 of its 3 DESH-related classes seen.
    desh_pos_y = ("Yes" if yolo and yolo["desh_score"] >= 2 else "No") if yolo else "N/A"
    lines.append(f"| DESH Positive | {desh_pos_i} | {desh_pos_y} | -- |")

    if yolo:
        lines.append(f"| Detections | -- | {yolo['n_detections']} objects | -- |")

    # PVH is only graded by the intensity engine on FLAIR input.
    pvh_str = f"Grade {intensity['pvh_grade']}/3" if intensity["pvh_grade"] is not None else "N/A (not FLAIR)"
    pvh_y_str = ("Yes" if yolo and yolo["pvh_detected"] else "No") if yolo else "N/A"
    lines.append(f"| PVH | {pvh_str} | {pvh_y_str} | -- |")

    lines.append(f"| Processing Time | {t_intensity}s | {t_yolo}s | -- |")

    lines.append("\n---\n## Intensity Engine Details\n")
    # mm values are only available when pixel spacing was supplied.
    if intensity.get("frontal_horn_mm"):
        lines.append(f"- Frontal horn width: {intensity['frontal_horn_mm']} mm")
    if intensity.get("skull_diameter_mm"):
        lines.append(f"- Skull diameter: {intensity['skull_diameter_mm']} mm")
    if intensity["temporal_horn_px"] > 0:
        mm_str = f" ({intensity['temporal_horn_mm']} mm)" if intensity.get("temporal_horn_mm") else ""
        lines.append(f"- Temporal horn width: {intensity['temporal_horn_px']} px{mm_str}")
    if intensity["third_ventricle_px"] > 0:
        mm_str = f" ({intensity['third_ventricle_mm']} mm)" if intensity.get("third_ventricle_mm") else ""
        lines.append(f"- Third ventricle width: {intensity['third_ventricle_px']} px{mm_str}")
    lines.append(f"- Ventricle/Brain ratio: {intensity['vb_ratio']:.4f} ({intensity['vent_area']}/{intensity['brain_area']} px)")
    lines.append(f"- DESH breakdown: Vent={intensity['desh_ventriculomegaly']}/2, Sylvian={intensity['desh_sylvian']}/2, Convexity={intensity['desh_convexity']}/2")
    if intensity.get("callosal_angle") is not None:
        ca = intensity["callosal_angle"]
        # <90 deg is read as suggestive of NPH, >=120 deg as normal.
        ca_str = "Suggestive" if ca < 90 else ("Indeterminate" if ca < 120 else "Normal")
        lines.append(f"- Callosal angle: {ca:.1f} deg ({ca_str})")
    if intensity["pvh_grade"] is not None:
        lines.append(f"- PVH: Grade {intensity['pvh_grade']}/3 (ratio: {intensity['pvh_ratio']:.4f})")

    if yolo:
        lines.append("\n---\n## YOLO Detection Details\n")
        for b in yolo["boxes"]:
            bw = b["x2"] - b["x1"]
            bh = b["y2"] - b["y1"]
            lines.append(f"- **{b['class']}**: {b['confidence']:.1%} conf, {bw}x{bh} px at ({b['x1']},{b['y1']})")

    lines.append(f"\n---\n## Ensemble NPH Score: **{nph_result['score']}/100 -- {nph_result['label']}**\n")
    lines.append(f"*{nph_result['recommendation']}*")

    report = "\n".join(lines)
    # Black placeholder keeps the UI layout stable when YOLO is unavailable.
    yolo_img = yolo["annotated"] if yolo else np.zeros_like(image)

    return intensity["annotated"], yolo_img, report
514
 
 
 
515
 
516
+ # ===========================================================================
517
+ # Tab 2: Multi-Slice Batch Analysis
518
+ # ===========================================================================
519
 
520
def batch_analyze(files, modality, sensitivity):
    """Analyze multiple slices and aggregate results.

    Runs the intensity engine over every uploaded file, tabulates
    per-slice biomarkers, and scores NPH likelihood from the worst-case
    (maximum) Evans' Index across the series.

    Args:
        files: list of Gradio file objects (each exposing a ``.name`` path).
        modality: UI modality label applied to every slice.
        sensitivity: segmentation sensitivity slider value.

    Returns:
        Markdown report string (per-slice table + aggregate summary).

    Raises:
        gr.Error: if no files were uploaded.
    """
    if not files:
        raise gr.Error("Please upload one or more brain scan images.")

    results = []
    for f in files:
        img = np.array(Image.open(f.name).convert("RGB"))
        try:
            r = _run_intensity_engine(img, modality, sensitivity, None)
            results.append({
                "file": os.path.basename(f.name),
                "evans_index": r["evans_index"],
                "desh_positive": r["desh_positive"],
                "desh_score": r["desh_score"],
                "vb_ratio": r["vb_ratio"],
                "quality_grade": r["quality"]["grade"],
                "symmetry": r["symmetry"],
            })
        # Deliberately broad: one failed slice must not abort the whole
        # batch -- it is recorded and rendered as an ERROR row instead.
        except Exception as e:
            results.append({
                "file": os.path.basename(f.name),
                "evans_index": 0,
                "desh_positive": False,
                "desh_score": 0,
                "vb_ratio": 0,
                "quality_grade": "Error",
                "symmetry": 0,
                "error": str(e),
            })

    # Aggregate
    valid = [r for r in results if "error" not in r]
    if not valid:
        return "All slices failed to process."

    ei_values = [r["evans_index"] for r in valid]
    max_ei = max(ei_values)
    max_ei_slice = valid[ei_values.index(max_ei)]["file"]
    mean_ei = np.mean(ei_values)
    any_desh = any(r["desh_positive"] for r in valid)
    max_desh = max(r["desh_score"] for r in valid)
    mean_vb = np.mean([r["vb_ratio"] for r in valid])

    # Score using the max Evans' Index (worst slice = most diagnostic)
    # NOTE(review): `any_desh` (any DESH-positive slice) doubles as the
    # sylvian-dilation flag here -- a proxy, not a direct measurement.
    score_input = {
        "evansIndex": max_ei,
        "deshScore": min(max_desh, 3),
        "sylvianDilation": any_desh,
        "corticalAtrophy": "unknown",
    }
    nph_result = _compute_nph_score(score_input)

    lines = ["# Multi-Slice NPH Analysis\n"]
    lines.append(f"**Slices analyzed:** {len(valid)} / {len(results)}\n")

    lines.append("---\n## Per-Slice Results\n")
    lines.append("| Slice | Evans' Index | V/B Ratio | DESH | Quality | Symmetry |")
    lines.append("|---|---|---|---|---|---|")
    for r in results:
        if "error" in r:
            lines.append(f"| {r['file']} | ERROR | -- | -- | -- | -- |")
        else:
            # " **" marks slices whose EI crosses the 0.3 abnormality cut-off.
            ei_flag = " **" if r["evans_index"] > 0.3 else ""
            desh_flag = "POS" if r["desh_positive"] else f"{r['desh_score']}/6"
            lines.append(f"| {r['file']} | {r['evans_index']:.3f}{ei_flag} | {r['vb_ratio']:.4f} | {desh_flag} | {r['quality_grade']} | {r['symmetry']}% |")

    lines.append(f"\n---\n## Aggregate Summary\n")
    lines.append(f"- **Max Evans' Index:** {max_ei:.3f} (slice: {max_ei_slice})" + (" -- ABNORMAL" if max_ei > 0.3 else ""))
    lines.append(f"- **Mean Evans' Index:** {mean_ei:.3f}")
    lines.append(f"- **Mean V/B Ratio:** {mean_vb:.4f}")
    lines.append(f"- **Max DESH Score:** {max_desh}/6")
    lines.append(f"- **Any DESH Positive:** {'Yes' if any_desh else 'No'}")

    lines.append(f"\n---\n## NPH Score: **{nph_result['score']}/100 -- {nph_result['label']}**\n")
    lines.append(f"*Based on worst-case slice (max EI). {nph_result['recommendation']}*")

    return "\n".join(lines)
598
 
599
 
600
  # ===========================================================================
601
+ # Tab 3: Clinical Scoring Calculator
602
  # ===========================================================================
603
 
604
  def compute_clinical_score(
605
+ evans_index, callosal_angle_str, desh_score, sylvian_dilation, vsr_str,
606
+ gait, cognition, urinary, cortical_atrophy
 
 
607
  ):
 
608
  callosal = None
609
  if callosal_angle_str and callosal_angle_str.strip():
610
  try:
 
620
  pass
621
 
622
  triad = [gait, cognition, urinary]
623
+ atrophy_map = {"None/Mild": "none", "Moderate": "moderate", "Significant": "significant"}
 
 
 
 
 
624
 
625
  score_data = {
626
+ "evansIndex": evans_index, "callosalAngle": callosal,
627
+ "deshScore": int(desh_score), "sylvianDilation": sylvian_dilation,
628
+ "vsr": vsr, "triad": triad,
 
 
 
629
  "corticalAtrophy": atrophy_map.get(cortical_atrophy, "unknown"),
630
  }
 
631
  result = _compute_nph_score(score_data)
632
 
633
+ lines = [f"# NPH Score: {result['score']}/100", f"## {result['label']}\n", f"{result['recommendation']}\n"]
 
 
 
 
 
634
  lines.append("---\n### Input Summary\n")
635
  lines.append(f"- **Evans' Index:** {evans_index:.3f}" + (" (>0.3 = abnormal)" if evans_index > 0.3 else ""))
636
  if callosal is not None:
637
+ lines.append(f"- **Callosal Angle:** {callosal:.1f} deg")
 
 
638
  lines.append(f"- **DESH Score:** {int(desh_score)}/3")
639
  lines.append(f"- **Sylvian Dilation:** {'Yes' if sylvian_dilation else 'No'}")
640
  if vsr is not None:
641
  lines.append(f"- **VSR:** {vsr:.2f}" + (" (>2.0 = strong NPH indicator)" if vsr > 2.0 else ""))
 
 
642
  triad_count = sum(triad)
643
+ lines.append(f"- **Hakim Triad:** {triad_count}/3")
644
  lines.append(f"- **Cortical Atrophy:** {cortical_atrophy}")
645
 
646
+ return "\n".join(lines)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
647
 
648
+
649
+ # ===========================================================================
650
+ # Tab 4: Report Generator
651
+ # ===========================================================================
652
+
653
def generate_report(image, modality, sensitivity, pixel_spacing_str,
                    patient_id, patient_age, clinical_history,
                    gait, cognition, urinary):
    """Generate a structured clinical radiology report.

    Runs both analysis engines, blends their Evans' Index, combines the
    imaging findings with the reported Hakim-triad symptoms, and renders
    a narrative Markdown report (header, findings, impression).

    Args:
        image: RGB numpy array from the Gradio Image component.
        modality: UI modality label.
        sensitivity: segmentation sensitivity slider value.
        pixel_spacing_str: optional mm-per-pixel as free text; blank or
            unparsable input falls back to pixel-only measurements.
        patient_id, patient_age, clinical_history: free-text metadata;
            empty values get placeholder wording in the report.
        gait, cognition, urinary: Hakim-triad checkbox booleans.

    Returns:
        Tuple of (intensity-annotated image, Markdown report string).

    Raises:
        gr.Error: if no image was uploaded.
    """
    if image is None:
        raise gr.Error("Please upload a brain scan first.")

    # Parse optional pixel spacing; malformed input is silently ignored.
    pixel_spacing = None
    if pixel_spacing_str and pixel_spacing_str.strip():
        try:
            pixel_spacing = float(pixel_spacing_str.strip())
        except ValueError:
            pass

    intensity = _run_intensity_engine(image, modality, sensitivity, pixel_spacing)
    # Fixed default confidence threshold for the report path.
    yolo = _run_yolo_engine(image, 0.25)

    ei = intensity["evans_index"]
    ei_y = yolo["evans_index"] if yolo else None
    # 60/40 blend of the two engines' Evans' Index estimates.
    # NOTE(review): this is a truthiness test, so a YOLO EI of exactly 0
    # also falls back to the intensity-only value -- presumably intended.
    ei_ensemble = round((ei * 0.6 + ei_y * 0.4), 4) if ei_y else ei

    triad = [gait, cognition, urinary]
    triad_count = sum(triad)

    score_input = {
        "evansIndex": ei_ensemble,
        "callosalAngle": intensity.get("callosal_angle"),
        "deshScore": intensity["desh_score"],
        "sylvianDilation": intensity["desh_sylvian"] > 0,
        "triad": triad,
        "corticalAtrophy": "unknown",
    }
    nph_result = _compute_nph_score(score_input)

    # ---- Report header ----
    lines = []
    lines.append("# NEURORADIOLOGY REPORT")
    lines.append("## Normal Pressure Hydrocephalus Assessment\n")
    lines.append("---\n")
    lines.append(f"**Patient ID:** {patient_id or 'Anonymous'}")
    lines.append(f"**Age:** {patient_age or 'Not specified'}")
    lines.append(f"**Date:** {datetime.now().strftime('%Y-%m-%d')}")
    lines.append(f"**Modality:** {modality}")
    lines.append(f"**Clinical History:** {clinical_history or 'Not provided'}\n")

    # ---- Clinical presentation (Hakim triad) ----
    lines.append("---\n## CLINICAL PRESENTATION\n")
    symptoms = []
    if gait: symptoms.append("gait disturbance")
    if cognition: symptoms.append("cognitive impairment")
    if urinary: symptoms.append("urinary incontinence")
    if symptoms:
        lines.append(f"Patient presents with {', '.join(symptoms)} ({triad_count}/3 Hakim triad components).")
    else:
        lines.append("No specific Hakim triad symptoms reported.")

    # ---- Imaging findings ----
    lines.append("\n---\n## FINDINGS\n")
    lines.append("### Ventricular System")
    ei_word = "abnormally enlarged" if ei_ensemble > 0.3 else "within normal limits"
    lines.append(f"The lateral ventricles are {ei_word} with an Evans' Index of **{ei_ensemble:.3f}** (normal < 0.3).")
    # mm measurements appear only when pixel spacing was provided.
    if intensity.get("frontal_horn_mm"):
        lines.append(f"Frontal horn width measures {intensity['frontal_horn_mm']} mm with a biparietal skull diameter of {intensity['skull_diameter_mm']} mm.")
    lines.append(f"Ventricle-to-brain parenchyma ratio is {intensity['vb_ratio']:.4f}.")

    if intensity["temporal_horn_px"] > 0:
        mm_str = f" ({intensity['temporal_horn_mm']} mm)" if intensity.get("temporal_horn_mm") else ""
        lines.append(f"\nThe temporal horns measure {intensity['temporal_horn_px']} px{mm_str}.")

    if intensity["third_ventricle_px"] > 0:
        mm_str = f" ({intensity['third_ventricle_mm']} mm)" if intensity.get("third_ventricle_mm") else ""
        lines.append(f"Third ventricle width is {intensity['third_ventricle_px']} px{mm_str}.")

    if intensity.get("callosal_angle") is not None:
        ca = intensity["callosal_angle"]
        # <90 degrees is the conventional NPH-suggestive threshold.
        ca_word = "acutely narrowed, consistent with NPH" if ca < 90 else "within normal range"
        lines.append(f"\nThe callosal angle measures {ca:.1f} degrees ({ca_word}).")

    lines.append("\n### DESH Assessment")
    desh_word = "present" if intensity["desh_positive"] else "not fully met"
    lines.append(f"DESH pattern is **{desh_word}** (score: {intensity['desh_score']}/6).")
    lines.append(f"- Ventriculomegaly: {intensity['desh_ventriculomegaly']}/2")
    lines.append(f"- Sylvian fissure dilation: {intensity['desh_sylvian']}/2")
    lines.append(f"- High convexity tightness: {intensity['desh_convexity']}/2")

    # PVH section only for FLAIR input (grade is None otherwise).
    if intensity["pvh_grade"] is not None:
        pvh_desc = {0: "absent", 1: "pencil-thin periventricular rim", 2: "smooth periventricular halo", 3: "irregular extension into deep white matter"}
        lines.append(f"\n### Periventricular Changes")
        lines.append(f"PVH Grade **{intensity['pvh_grade']}/3**: {pvh_desc.get(intensity['pvh_grade'], '')}.")

    lines.append(f"\n### Image Quality")
    q = intensity["quality"]
    lines.append(f"Image quality is {q['grade'].lower()} (score: {q['overall']}/100). Symmetry index: {intensity['symmetry']}%.")

    if yolo:
        lines.append(f"\n### AI Structure Detection (YOLO)")
        lines.append(f"{yolo['n_detections']} structures detected:")
        for b in yolo["boxes"]:
            lines.append(f"- {b['class']}: {b['confidence']:.0%} confidence")

    # ---- Impression ----
    lines.append(f"\n---\n## IMPRESSION\n")
    lines.append(f"**NPH Assessment Score: {nph_result['score']}/100 -- {nph_result['label']}**\n")

    # Collect only the positive findings for the narrative summary.
    findings = []
    if ei_ensemble > 0.3:
        findings.append(f"ventriculomegaly (EI={ei_ensemble:.3f})")
    if intensity["desh_positive"]:
        findings.append("DESH pattern")
    if intensity["pvh_grade"] is not None and intensity["pvh_grade"] >= 2:
        findings.append(f"periventricular hyperintensities (Grade {intensity['pvh_grade']})")
    if intensity.get("callosal_angle") is not None and intensity["callosal_angle"] < 90:
        findings.append(f"acute callosal angle ({intensity['callosal_angle']:.0f} deg)")
    if triad_count >= 2:
        findings.append(f"clinical Hakim triad ({triad_count}/3)")

    if findings:
        lines.append(f"Key findings: {', '.join(findings)}.")
    lines.append(f"\n{nph_result['recommendation']}")

    # ---- Footer / disclaimer ----
    lines.append(f"\n---\n*This report was generated by the NPH Diagnostic Platform (v3.0). ")
    lines.append(f"Measurements from JPEG/PNG images are approximate. For clinical decisions, ")
    lines.append(f"correlate with DICOM-derived measurements and clinical examination.*")
    lines.append(f"\n*Matheus Rech, MD | {datetime.now().strftime('%Y-%m-%d %H:%M')}*")

    return intensity["annotated"], "\n".join(lines)
775
 
776
 
777
  # ===========================================================================
778
+ # Tabs 5-8: Filters & ML Models
779
  # ===========================================================================
780
 
781
def apply_filter(image, effect, intensity):
    """Apply a visual effect to an image, weighted by intensity.

    Args:
        image: HxWx3 uint8 numpy array from the Gradio image input, or None.
        effect: Name of the effect to apply; an unrecognized name is a no-op.
        intensity: Blend/strength factor in [0, 1].

    Returns:
        The processed image as a numpy array.

    Raises:
        gr.Error: If no image was uploaded.
    """
    if image is None:
        raise gr.Error("Please upload an image first.")
    src = Image.fromarray(image)

    if effect == "Grayscale":
        result = ImageOps.grayscale(src).convert("RGB")
        # Partial intensity blends the grayscale back toward the original.
        if intensity < 1.0:
            result = Image.blend(src, result, intensity)
    elif effect == "Sepia":
        toned = ImageOps.colorize(ImageOps.grayscale(src), "#704214", "#C0A080")
        result = Image.blend(src, toned, intensity)
    elif effect == "Blur":
        # Radius scales with intensity but never drops below 1 px.
        radius = max(1, int(intensity * 10))
        result = src.filter(ImageFilter.GaussianBlur(radius=radius))
    elif effect == "Sharpen":
        result = ImageEnhance.Sharpness(src).enhance(1 + intensity * 4)
    elif effect == "Edge Detect":
        edges = src.filter(ImageFilter.FIND_EDGES)
        result = Image.blend(src, edges, intensity)
    elif effect == "Invert":
        negative = ImageOps.invert(src.convert("RGB"))
        result = Image.blend(src, negative, intensity)
    elif effect == "Brightness":
        # Maps intensity 0..1 onto an enhancement factor of 0.5..2.0.
        result = ImageEnhance.Brightness(src).enhance(0.5 + intensity * 1.5)
    elif effect == "Contrast":
        # Maps intensity 0..1 onto an enhancement factor of 0.5..2.5.
        result = ImageEnhance.Contrast(src).enhance(0.5 + intensity * 2)
    else:
        # Unknown effect name: pass the image through unchanged.
        result = src
    return np.array(result)
 
809
def classify_image(image):
    """Classify an uploaded image with the ViT ImageNet pipeline.

    Args:
        image: HxWx3 uint8 numpy array, or None.

    Returns:
        Mapping of predicted label -> confidence score for gr.Label.

    Raises:
        gr.Error: If no image was uploaded.
    """
    if image is None:
        raise gr.Error("Please upload an image first.")
    predictions = get_classifier()(Image.fromarray(image))
    return {pred["label"]: pred["score"] for pred in predictions}
 
813
 
814
def detect_objects(image, threshold):
    """Run DETR object detection and format results for gr.AnnotatedImage.

    Args:
        image: HxWx3 uint8 numpy array, or None.
        threshold: Minimum confidence score for a detection to be kept.

    Returns:
        Tuple of (image, annotations) where each annotation is
        ((xmin, ymin, xmax, ymax), "label (confidence%)").

    Raises:
        gr.Error: If no image was uploaded.
    """
    if image is None:
        raise gr.Error("Please upload an image first.")
    detections = get_detector()(Image.fromarray(image), threshold=threshold)
    annotations = []
    for det in detections:
        box = det["box"]
        coords = (box["xmin"], box["ymin"], box["xmax"], box["ymax"])
        annotations.append((coords, f"{det['label']} ({det['score']:.0%})"))
    return (image, annotations)
 
 
 
 
 
 
820
 
821
def segment_image(image):
    """Run DETR panoptic segmentation and format masks for gr.AnnotatedImage.

    Args:
        image: HxWx3 uint8 numpy array, or None.

    Returns:
        Tuple of (image, [(mask_array, label), ...]).

    Raises:
        gr.Error: If no image was uploaded.
    """
    if image is None:
        raise gr.Error("Please upload an image first.")
    segments = get_segmenter()(Image.fromarray(image))
    masks = []
    for seg in segments:
        masks.append((np.array(seg["mask"]), seg["label"]))
    return (image, masks)
 
 
 
 
826
 
827
 
828
  # ===========================================================================
829
  # Build the UI
830
  # ===========================================================================
831
 
832
+ CUSTOM_CSS = """
833
+ .main-title { text-align: center; margin-bottom: 0.2em; }
834
+ .subtitle { text-align: center; color: #666; margin-top: 0; font-size: 0.9em; }
835
+ .engine-label { font-weight: 700; font-size: 0.85em; text-transform: uppercase; letter-spacing: 0.05em; }
836
+ footer { display: none !important; }
837
  """
838
 
839
+ with gr.Blocks(theme=gr.themes.Soft(), css=CUSTOM_CSS, title="NPH Diagnostic Platform") as demo:
840
+ gr.Markdown("# NPH Diagnostic Platform", elem_classes="main-title")
841
  gr.Markdown(
842
+ "Dual-engine analysis (intensity segmentation + YOLO detection), ensemble scoring, "
843
+ "multi-slice batch processing, clinical calculator, and structured report generation.",
844
  elem_classes="subtitle"
845
  )
846
 
847
+ # ========== Tab 1: Dual-Engine Analysis ==========
848
+ with gr.Tab("Dual-Engine Analysis"):
849
  gr.Markdown(
850
+ "### Two Engines, One Diagnosis\n"
851
+ "Runs **intensity-based segmentation** AND **YOLO deep learning detection** on the same image, "
852
+ "compares results side-by-side, and produces an **ensemble NPH score** (weighted 60/40)."
 
 
853
  )
854
  with gr.Row():
855
  with gr.Column(scale=1):
856
+ de_input = gr.Image(label="Upload Brain Scan", type="numpy")
857
+ de_modality = gr.Dropdown(
858
  choices=["Axial FLAIR", "Axial T1", "Axial T2", "Coronal T2",
859
  "Axial T2 FFE", "Sagittal T1", "CT Head"],
860
+ value="Axial FLAIR", label="Modality / Sequence"
 
 
 
 
 
 
 
 
 
861
  )
862
+ de_sensitivity = gr.Slider(10, 90, value=50, step=5, label="Sensitivity (%)")
863
+ de_yolo_conf = gr.Slider(0.1, 0.95, value=0.25, step=0.05, label="YOLO Confidence Threshold")
864
+ de_spacing = gr.Textbox(label="Pixel Spacing (mm/px)", placeholder="auto-estimate", value="")
865
+ de_btn = gr.Button("Run Dual-Engine Analysis", variant="primary", size="lg")
 
 
866
 
867
  with gr.Column(scale=2):
868
+ with gr.Row():
869
+ with gr.Column():
870
+ gr.Markdown("**Intensity Engine**", elem_classes="engine-label")
871
+ de_intensity_out = gr.Image(label="Segmentation Overlay", type="numpy")
872
+ with gr.Column():
873
+ gr.Markdown("**YOLO Engine**", elem_classes="engine-label")
874
+ de_yolo_out = gr.Image(label="Detection Overlay", type="numpy")
875
+
876
+ de_report = gr.Markdown(label="Dual-Engine Report")
877
+
878
+ de_btn.click(
879
+ fn=dual_engine_analyze,
880
+ inputs=[de_input, de_modality, de_sensitivity, de_spacing, de_yolo_conf],
881
+ outputs=[de_intensity_out, de_yolo_out, de_report]
882
  )
883
 
884
+ with gr.Accordion("How Ensemble Scoring Works", open=False):
885
  gr.Markdown(
886
+ "The ensemble combines both engines:\n\n"
887
+ "- **Evans' Index**: Weighted average (60% intensity + 40% YOLO)\n"
888
+ "- **DESH Pattern**: Takes the maximum score from either engine\n"
889
+ "- **Sylvian Dilation**: Positive if either engine detects it\n\n"
890
+ "This approach is more robust than either engine alone -- intensity segmentation "
891
+ "is better at precise boundary delineation, while YOLO is better at detecting "
892
+ "spatial patterns and multiple structures simultaneously."
 
 
 
 
 
 
 
 
 
893
  )
894
 
895
+ # ========== Tab 2: Multi-Slice Batch ==========
896
+ with gr.Tab("Multi-Slice Batch"):
897
  gr.Markdown(
898
+ "### Batch Analysis Across Multiple Slices\n"
899
+ "Upload multiple axial slices from the same patient. Each slice is analyzed individually, "
900
+ "then results are aggregated. The **worst-case slice** (highest Evans' Index) drives the NPH score."
 
 
901
  )
902
  with gr.Row():
903
+ with gr.Column():
904
+ batch_files = gr.File(
905
+ label="Upload Multiple Slices",
906
+ file_count="multiple",
907
+ file_types=["image"],
908
  )
909
+ batch_modality = gr.Dropdown(
910
+ choices=["Axial FLAIR", "Axial T1", "Axial T2", "CT Head"],
911
+ value="Axial FLAIR", label="Modality"
912
+ )
913
+ batch_sensitivity = gr.Slider(10, 90, value=50, step=5, label="Sensitivity (%)")
914
+ batch_btn = gr.Button("Analyze All Slices", variant="primary", size="lg")
 
 
 
 
 
 
 
915
 
916
+ batch_report = gr.Markdown(label="Batch Report")
917
+ batch_btn.click(fn=batch_analyze, inputs=[batch_files, batch_modality, batch_sensitivity], outputs=batch_report)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
918
 
919
+ # ========== Tab 3: NPH Score Calculator ==========
920
  with gr.Tab("NPH Score Calculator"):
921
  gr.Markdown(
922
  "### Clinical NPH Scoring Calculator\n"
923
+ "Enter imaging biomarkers and clinical findings to compute a weighted NPH probability score."
 
 
924
  )
925
  with gr.Row():
926
  with gr.Column():
927
  gr.Markdown("#### Imaging Biomarkers")
928
+ calc_evans = gr.Slider(0.0, 0.6, value=0.30, step=0.01, label="Evans' Index")
929
+ calc_callosal = gr.Textbox(label="Callosal Angle (degrees)", placeholder="e.g. 85", value="")
930
+ calc_desh = gr.Slider(0, 3, value=0, step=1, label="DESH Score (0-3)")
 
 
 
 
 
 
 
 
 
 
931
  calc_sylvian = gr.Checkbox(label="Sylvian Fissure Dilation", value=False)
932
+ calc_vsr = gr.Textbox(label="VSR", placeholder="e.g. 2.5", value="")
 
 
 
 
 
933
  with gr.Column():
934
  gr.Markdown("#### Clinical Findings (Hakim Triad)")
935
  calc_gait = gr.Checkbox(label="Gait disturbance", value=False)
936
  calc_cognition = gr.Checkbox(label="Cognitive impairment", value=False)
937
  calc_urinary = gr.Checkbox(label="Urinary incontinence", value=False)
 
938
  gr.Markdown("#### Modifiers")
939
+ calc_atrophy = gr.Radio(["None/Mild", "Moderate", "Significant"], value="None/Mild", label="Cortical Atrophy")
 
 
 
 
 
940
  calc_btn = gr.Button("Calculate NPH Score", variant="primary", size="lg")
941
 
942
+ calc_report = gr.Markdown(label="Score Report")
 
943
  calc_btn.click(
944
  fn=compute_clinical_score,
945
  inputs=[calc_evans, calc_callosal, calc_desh, calc_sylvian, calc_vsr,
 
947
  outputs=calc_report
948
  )
949
 
950
+ # ========== Tab 4: Report Generator ==========
951
+ with gr.Tab("Report Generator"):
952
+ gr.Markdown(
953
+ "### Structured Clinical Report\n"
954
+ "Generates a formal neuroradiology-style NPH assessment report combining imaging analysis "
955
+ "with clinical findings."
956
+ )
957
+ with gr.Row():
958
+ with gr.Column(scale=1):
959
+ rpt_input = gr.Image(label="Upload Brain Scan", type="numpy")
960
+ rpt_modality = gr.Dropdown(
961
+ choices=["Axial FLAIR", "Axial T1", "Axial T2", "Coronal T2", "CT Head"],
962
+ value="Axial FLAIR", label="Modality"
963
+ )
964
+ rpt_sensitivity = gr.Slider(10, 90, value=50, step=5, label="Sensitivity (%)")
965
+ rpt_spacing = gr.Textbox(label="Pixel Spacing (mm/px)", placeholder="auto-estimate", value="")
966
+ gr.Markdown("#### Patient Info")
967
+ rpt_id = gr.Textbox(label="Patient ID", placeholder="Anonymous")
968
+ rpt_age = gr.Textbox(label="Age", placeholder="e.g. 72")
969
+ rpt_history = gr.Textbox(label="Clinical History", lines=2, placeholder="e.g. Progressive gait instability...")
970
+ gr.Markdown("#### Hakim Triad")
971
+ rpt_gait = gr.Checkbox(label="Gait disturbance", value=False)
972
+ rpt_cognition = gr.Checkbox(label="Cognitive impairment", value=False)
973
+ rpt_urinary = gr.Checkbox(label="Urinary incontinence", value=False)
974
+ rpt_btn = gr.Button("Generate Report", variant="primary", size="lg")
975
+
976
+ with gr.Column(scale=2):
977
+ rpt_overlay = gr.Image(label="Segmentation", type="numpy")
978
+ rpt_text = gr.Markdown(label="Clinical Report")
979
+
980
+ rpt_btn.click(
981
+ fn=generate_report,
982
+ inputs=[rpt_input, rpt_modality, rpt_sensitivity, rpt_spacing,
983
+ rpt_id, rpt_age, rpt_history, rpt_gait, rpt_cognition, rpt_urinary],
984
+ outputs=[rpt_overlay, rpt_text]
985
+ )
986
+
987
+ # ========== Tab 5: Browser NPH Detector ==========
988
  with gr.Tab("NPH Detector (Browser)"):
989
  gr.Markdown(
990
+ "### Client-Side NPH Pipeline\n"
991
+ "Runs entirely in your browser via JavaScript Canvas API. Zero server dependency."
 
 
992
  )
993
  gr.HTML(
994
  value='<iframe src="https://mmrech-nph-detector-js.hf.space" '
 
997
  'style="border-radius: 12px; border: 1px solid #333;"></iframe>',
998
  )
999
 
1000
+ # ========== Tab 6: Video Demo ==========
1001
  with gr.Tab("Video Demo"):
1002
+ gr.Markdown("### Whole-Brain Segmentation Demo")
1003
+ gr.Video(value="examples/hydromorph_whole_brain_segmentation.mp4", label="NPH Segmentation Video", autoplay=False)
 
 
 
 
 
 
 
1004
 
1005
+ # ========== Tab 7: Filters ==========
1006
  with gr.Tab("Filters & Effects"):
1007
  with gr.Row():
1008
  with gr.Column():
1009
  filter_input = gr.Image(label="Upload Image", type="numpy")
1010
  filter_effect = gr.Dropdown(
1011
+ choices=["Grayscale", "Sepia", "Blur", "Sharpen", "Edge Detect", "Invert", "Brightness", "Contrast"],
 
1012
  value="Sepia", label="Effect"
1013
  )
1014
+ filter_intensity = gr.Slider(0.0, 1.0, value=0.7, step=0.05, label="Intensity")
1015
  filter_btn = gr.Button("Apply Filter", variant="primary")
1016
  with gr.Column():
1017
  filter_output = gr.Image(label="Result", type="numpy")
1018
  filter_btn.click(fn=apply_filter, inputs=[filter_input, filter_effect, filter_intensity], outputs=filter_output)
1019
 
1020
+ # ========== Tab 8: Classification ==========
1021
  with gr.Tab("Image Classification"):
1022
  with gr.Row():
1023
  with gr.Column():
 
1027
  cls_output = gr.Label(label="Predictions", num_top_classes=5)
1028
  cls_btn.click(fn=classify_image, inputs=cls_input, outputs=cls_output)
1029
 
1030
+ # ========== Tab 9: Object Detection ==========
1031
  with gr.Tab("Object Detection"):
1032
  with gr.Row():
1033
  with gr.Column():
1034
  det_input = gr.Image(label="Upload Image", type="numpy")
1035
+ det_threshold = gr.Slider(0.1, 0.95, value=0.5, step=0.05, label="Confidence Threshold")
1036
  det_btn = gr.Button("Detect Objects", variant="primary")
1037
  with gr.Column():
1038
  det_output = gr.AnnotatedImage(label="Detections")
1039
  det_btn.click(fn=detect_objects, inputs=[det_input, det_threshold], outputs=det_output)
1040
 
1041
+ # ========== Tab 10: Segmentation ==========
1042
  with gr.Tab("Segmentation"):
1043
  with gr.Row():
1044
  with gr.Column():
 
1048
  seg_output = gr.AnnotatedImage(label="Segmentation Map")
1049
  seg_btn.click(fn=segment_image, inputs=seg_input, outputs=seg_output)
1050
 
1051
+ gr.Markdown(
1052
+ "<center style='color: #888; font-size: 0.75em; margin-top: 20px;'>"
1053
+ "NPH Diagnostic Platform v3.0 | Matheus Rech, MD | "
1054
+ "Built with Gradio + YOLO + Transformers"
1055
+ "</center>"
1056
+ )
1057
+
1058
  demo.launch()