anky2002 committed on
Commit
09803bf
Β·
verified Β·
1 Parent(s): 0aa9fa4

Upload agents/sensor_agent.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. agents/sensor_agent.py +218 -373
agents/sensor_agent.py CHANGED
@@ -1,384 +1,229 @@
1
- """
2
- FORENSIQ β€” Sensor Characteristics Agent
3
- Analyzes sensor physics violations:
4
- - PRNU (Photo-Response Non-Uniformity) noise residual analysis
5
- - Noise structure (Poisson-Gaussian model fit)
6
- - Bayer demosaicing artifact detection
7
- """
8
-
9
  import numpy as np
10
  from PIL import Image
11
- from scipy.ndimage import gaussian_filter, uniform_filter
12
  from scipy.signal import convolve2d
13
- from dataclasses import dataclass
14
  from typing import Dict, Any
15
-
16
  from agents.optical_agent import AgentEvidence
17
 
 
 
18
 
19
- # ─── PRNU Noise Residual ────────────────────────────────────────────
20
- def analyze_prnu(img: Image.Image) -> Dict[str, Any]:
21
- """
22
- Extract noise residual fingerprint.
23
- Real cameras leave a unique PRNU pattern; AI images have uniform or random noise.
24
- Inconsistent local noise variance β†’ splicing / AI generation.
25
- """
26
- rgb = np.array(img.convert("RGB")).astype(np.float64)
27
-
28
- noise_residuals = []
29
  for c in range(3):
30
- channel = rgb[:, :, c]
31
- denoised = gaussian_filter(channel, sigma=3.0)
32
- residual = channel - denoised
33
- noise_residuals.append(residual)
34
-
35
- noise = np.stack(noise_residuals, axis=-1)
36
- noise_energy = np.mean(noise ** 2, axis=-1)
37
-
38
- # Local variance map (32x32 blocks)
39
- local_var = uniform_filter(noise_energy, size=32)
40
-
41
- noise_std = float(np.std(local_var))
42
- noise_mean = float(np.mean(local_var))
43
- uniformity = 1.0 - min(noise_std / (noise_mean + 1e-9), 1.0)
44
-
45
- # Correlation between channels (real sensors have correlated PRNU)
46
- r_noise = noise_residuals[0].ravel()
47
- g_noise = noise_residuals[1].ravel()
48
- b_noise = noise_residuals[2].ravel()
49
-
50
- # Subsample for speed
51
- step = max(1, len(r_noise) // 100000)
52
- rg_corr = float(np.corrcoef(r_noise[::step], g_noise[::step])[0, 1])
53
- rb_corr = float(np.corrcoef(r_noise[::step], b_noise[::step])[0, 1])
54
-
55
- # Real cameras: correlated noise residuals; AI: uncorrelated
56
- avg_corr = (rg_corr + rb_corr) / 2
57
-
58
- if uniformity > 0.7 and avg_corr > 0.3:
59
- score = -0.4
60
- note = "Consistent sensor noise pattern with correlated channels (real camera)"
61
- elif uniformity < 0.4:
62
- score = 0.5
63
- note = "Inconsistent noise regions suggest splicing or AI generation"
64
- elif avg_corr < 0.1:
65
- score = 0.4
66
- note = "Uncorrelated channel noise (atypical for real cameras)"
67
- else:
68
- score = 0.1
69
- note = "Moderate noise consistency"
70
-
71
- return {
72
- "test": "PRNU Noise Residual",
73
- "noise_uniformity": round(uniformity, 4),
74
- "noise_mean": round(noise_mean, 4),
75
- "rg_correlation": round(rg_corr, 4),
76
- "rb_correlation": round(rb_corr, 4),
77
- "score": score,
78
- "note": note,
79
- "noise_map": noise_energy,
80
- }
81
-
82
-
83
- # ─── Noise Structure (Poisson-Gaussian Model) ──────────────────────
84
- def analyze_noise_structure(img: Image.Image) -> Dict[str, Any]:
85
- """
86
- Real sensor noise follows σ² = σ²_read + kΒ·I (Poisson-Gaussian).
87
- AI images lack this physical noise model.
88
- """
89
- rgb = np.array(img.convert("RGB")).astype(np.float64)
90
- gray = np.mean(rgb, axis=-1)
91
-
92
- # Compute local mean and local variance in blocks
93
- block_size = 16
94
- h, w = gray.shape
95
- h_crop, w_crop = (h // block_size) * block_size, (w // block_size) * block_size
96
- gray = gray[:h_crop, :w_crop]
97
-
98
- intensities = []
99
- variances = []
100
-
101
- for i in range(0, h_crop, block_size):
102
- for j in range(0, w_crop, block_size):
103
- block = gray[i:i + block_size, j:j + block_size]
104
- intensities.append(float(np.mean(block)))
105
- variances.append(float(np.var(block)))
106
-
107
- intensities = np.array(intensities)
108
- variances = np.array(variances)
109
-
110
- # Filter out extreme blocks
111
- valid = (intensities > 10) & (intensities < 245) & (variances > 0)
112
- if np.sum(valid) < 20:
113
- return {
114
- "test": "Noise Structure (Poisson-Gaussian)",
115
- "score": 0.0,
116
- "note": "Insufficient data for noise model fitting",
117
- }
118
-
119
- I = intensities[valid]
120
- V = variances[valid]
121
-
122
- # Fit linear model: V = a + b*I (Poisson-Gaussian)
123
  try:
124
- coeffs = np.polyfit(I, V, 1)
125
- fitted = np.polyval(coeffs, I)
126
- residual = float(np.mean((V - fitted) ** 2))
127
- r_squared = 1.0 - residual / (np.var(V) + 1e-9)
128
- except Exception:
129
- r_squared = 0.0
130
-
131
- if r_squared > 0.5:
132
- score = -0.3
133
- note = f"Noise follows Poisson-Gaussian model (RΒ²={r_squared:.3f}, real sensor)"
134
- elif r_squared < 0.1:
135
- score = 0.5
136
- note = f"Noise does NOT follow sensor physics (RΒ²={r_squared:.3f}, AI-like)"
137
- else:
138
- score = 0.15
139
- note = f"Weak Poisson-Gaussian fit (RΒ²={r_squared:.3f})"
140
-
141
- return {
142
- "test": "Noise Structure (Poisson-Gaussian)",
143
- "r_squared": round(r_squared, 4),
144
- "slope": round(float(coeffs[0]), 6) if r_squared > 0 else None,
145
- "intercept": round(float(coeffs[1]), 4) if r_squared > 0 else None,
146
- "score": score,
147
- "note": note,
148
- }
149
-
150
-
151
- # ─── Bayer Demosaicing Artifacts ────────────────────────────────────
152
- def analyze_bayer_demosaicing(img: Image.Image) -> Dict[str, Any]:
153
- """
154
- Real cameras: green channel has ~2x samples β†’ lower noise than R/B.
155
- Expected: Οƒ_green < Οƒ_red β‰ˆ Οƒ_blue.
156
- AI images lack this Bayer pattern artifact.
157
- """
158
- rgb = np.array(img.convert("RGB")).astype(np.float64)
159
-
160
- # High-frequency noise per channel
161
- noise_std = {}
162
- for c, name in enumerate(["red", "green", "blue"]):
163
- channel = rgb[:, :, c]
164
- denoised = gaussian_filter(channel, sigma=1.5)
165
- noise = channel - denoised
166
- noise_std[name] = float(np.std(noise))
167
-
168
- green_lower = noise_std["green"] < min(noise_std["red"], noise_std["blue"])
169
- rb_similar = abs(noise_std["red"] - noise_std["blue"]) / (
170
- max(noise_std["red"], noise_std["blue"]) + 1e-9
171
- ) < 0.2
172
-
173
- if green_lower and rb_similar:
174
- score = -0.4
175
- note = (
176
- f"Bayer pattern detected: Οƒ_green({noise_std['green']:.3f}) < "
177
- f"Οƒ_red({noise_std['red']:.3f}) β‰ˆ Οƒ_blue({noise_std['blue']:.3f})"
178
- )
179
- elif green_lower:
180
- score = -0.2
181
- note = "Green channel is quieter but R/B difference is large"
182
- else:
183
- score = 0.4
184
- note = (
185
- f"No Bayer pattern: Οƒ_green({noise_std['green']:.3f}), "
186
- f"Οƒ_red({noise_std['red']:.3f}), Οƒ_blue({noise_std['blue']:.3f})"
187
- )
188
-
189
- return {
190
- "test": "Bayer Demosaicing Artifacts",
191
- "noise_red": round(noise_std["red"], 4),
192
- "noise_green": round(noise_std["green"], 4),
193
- "noise_blue": round(noise_std["blue"], 4),
194
- "green_is_lower": green_lower,
195
- "rb_similar": rb_similar,
196
- "score": score,
197
- "note": note,
198
- }
199
-
200
-
201
- # ─── CFA Pattern Verification ────────────────────────────────────────
202
- def analyze_cfa_pattern(img: Image.Image) -> Dict[str, Any]:
203
- """
204
- Real camera images retain traces of CFA (Color Filter Array) interpolation.
205
- Detect periodic patterns in cross-channel differences at Nyquist frequency.
206
- """
207
- rgb = np.array(img.convert("RGB")).astype(np.float64)
208
- r, g, b = rgb[:, :, 0], rgb[:, :, 1], rgb[:, :, 2]
209
-
210
- # Compute RG and BG differences
211
- rg = r - g
212
- bg = b - g
213
-
214
- # 2D FFT of difference channels
215
- fft_rg = np.abs(np.fft.fftshift(np.fft.fft2(rg)))
216
- fft_bg = np.abs(np.fft.fftshift(np.fft.fft2(bg)))
217
-
218
- h, w = fft_rg.shape
219
- cy, cx = h // 2, w // 2
220
-
221
- # Check for Bayer CFA signature: peaks at (N/2, 0), (0, N/2), (N/2, N/2)
222
- nyquist_energy_rg = float(
223
- fft_rg[cy, 0] + fft_rg[0, cx] + fft_rg[0, 0]
224
- ) / 3
225
- center_energy_rg = float(np.mean(fft_rg[cy - 5:cy + 5, cx - 5:cx + 5]))
226
- cfa_ratio = nyquist_energy_rg / (center_energy_rg + 1e-9)
227
-
228
- if cfa_ratio > 1.5:
229
- score = -0.3
230
- note = f"CFA interpolation traces detected (ratio={cfa_ratio:.2f}, real camera)"
231
- elif cfa_ratio < 0.5:
232
- score = 0.3
233
- note = f"No CFA traces (ratio={cfa_ratio:.2f}, possible AI generation)"
234
- else:
235
- score = 0.0
236
- note = f"Ambiguous CFA analysis (ratio={cfa_ratio:.2f})"
237
-
238
- return {
239
- "test": "CFA Pattern Verification",
240
- "cfa_nyquist_ratio": round(cfa_ratio, 4),
241
- "score": score,
242
- "note": note,
243
- }
244
-
245
-
246
- # ─── Hot/Dead Pixel Analysis ────────────────────────────────────────
247
- def analyze_hot_dead_pixels(img: Image.Image) -> Dict[str, Any]:
248
- """
249
- Real sensors have hot (stuck bright) and dead (stuck dark) pixels.
250
- AI images lack these sensor defects entirely.
251
- """
252
- gray = np.array(img.convert("L")).astype(np.float64)
253
- h, w = gray.shape
254
-
255
- # Local median filter
256
- from scipy.ndimage import median_filter
257
- med = median_filter(gray, size=5)
258
-
259
- diff = np.abs(gray - med)
260
-
261
- # Hot pixels: much brighter than neighbors
262
- hot_threshold = np.percentile(diff, 99.9)
263
- hot_pixels = int(np.sum(diff > hot_threshold))
264
-
265
- # Dead pixels: much darker than neighbors AND very low absolute value
266
- dark_mask = (gray < 5) & (diff > hot_threshold * 0.5)
267
- dead_pixels = int(np.sum(dark_mask))
268
-
269
- total_defects = hot_pixels + dead_pixels
270
- defect_rate = total_defects / (h * w)
271
-
272
- # Real cameras: typically 0.001%-0.01% defective pixels
273
- if 0.00001 < defect_rate < 0.001:
274
- score = -0.2
275
- note = f"Sensor defects detected ({total_defects} pixels, rate={defect_rate:.6f}, real camera)"
276
- elif defect_rate < 0.000001:
277
- score = 0.2
278
- note = f"No sensor defects ({total_defects} pixels, possible AI generation)"
279
- else:
280
- score = 0.0
281
- note = f"Defect rate={defect_rate:.6f} ({total_defects} pixels)"
282
-
283
- return {
284
- "test": "Hot/Dead Pixel Analysis",
285
- "hot_pixels": hot_pixels,
286
- "dead_pixels": dead_pixels,
287
- "defect_rate": round(defect_rate, 8),
288
- "score": score,
289
- "note": note,
290
- }
291
-
292
-
293
- # ─── JPEG Quantization Table Analysis ───────────────────────────────
294
- def analyze_jpeg_quantization(img: Image.Image) -> Dict[str, Any]:
295
- """
296
- Real JPEG images have specific quantization tables from camera firmware.
297
- AI-generated images saved as JPEG have generic tables.
298
- Double-compressed images show quantization table mismatches.
299
- """
300
  try:
301
- qtables = img.quantization
302
- if qtables:
303
- # Standard JPEG quality tables
304
- tables = list(qtables.values())
305
- n_tables = len(tables)
306
-
307
- # Analyze first table (luminance)
308
- if tables:
309
- t = np.array(list(tables[0].values()) if isinstance(tables[0], dict) else list(tables[0]))
310
- if len(t) == 64:
311
- # Check for standard Photoshop/camera patterns
312
- is_uniform = float(np.std(t)) < 5
313
- max_q = int(np.max(t))
314
- min_q = int(np.min(t))
315
-
316
- if is_uniform:
317
- score = 0.2
318
- note = f"Unusual uniform quantization table (std={np.std(t):.1f})"
319
- elif max_q > 100:
320
- score = -0.2
321
- note = f"Heavy compression quantization (max={max_q}, camera-typical)"
322
- else:
323
- score = -0.1
324
- note = f"Standard quantization table ({n_tables} tables, range=[{min_q},{max_q}])"
325
- else:
326
- score = 0.0
327
- note = "Non-standard quantization table size"
328
- else:
329
- score = 0.1
330
- note = "No luminance quantization table found"
331
- else:
332
- score = 0.1
333
- note = "No quantization tables (not JPEG or tables stripped)"
334
- except Exception:
335
- score = 0.0
336
- note = "Unable to read quantization tables"
337
-
338
- return {
339
- "test": "JPEG Quantization Table",
340
- "score": score,
341
- "note": note,
342
- }
343
-
344
-
345
- # ─── Main Agent Entry Point ─────────────────────────────────────────
346
- def run_sensor_agent(img: Image.Image) -> AgentEvidence:
347
- """Run all sensor characteristic tests."""
348
- findings = []
349
- scores = []
350
-
351
- for fn in [analyze_prnu, analyze_noise_structure, analyze_bayer_demosaicing,
352
- analyze_cfa_pattern, analyze_hot_dead_pixels, analyze_jpeg_quantization]:
353
- try:
354
- result = fn(img)
355
- findings.append(result)
356
- scores.append(result["score"])
357
- except Exception as e:
358
- findings.append({"test": fn.__name__, "error": str(e), "score": 0})
359
-
360
- avg_score = float(np.mean(scores)) if scores else 0.0
361
- confidence = min(1.0, 0.5 + 0.5 * abs(avg_score))
362
-
363
- violations = [f["test"] for f in findings if f.get("score", 0) > 0.2]
364
- compliant = [f["test"] for f in findings if f.get("score", 0) < -0.1]
365
-
366
- if violations:
367
- rationale = f"Sensor violations: {', '.join(violations)}."
368
- elif compliant:
369
- rationale = f"Sensor physics consistent: {', '.join(compliant)}."
370
- else:
371
- rationale = "Sensor analysis inconclusive."
372
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
373
  for f in findings:
374
- if f.get("note"):
375
- rationale += f" [{f['test']}]: {f['note']}."
376
-
377
- return AgentEvidence(
378
- agent_name="Sensor Characteristics Agent",
379
- violation_score=np.clip(avg_score, -1, 1),
380
- confidence=confidence,
381
- failure_prob=max(0.0, 1.0 - len(scores) / 6),
382
- rationale=rationale,
383
- sub_findings=findings,
384
- )
 
1
+ """FORENSIQ β€” Sensor Characteristics Agent (18 features)"""
 
 
 
 
 
 
 
2
  import numpy as np
3
  from PIL import Image
4
+ from scipy.ndimage import gaussian_filter, uniform_filter, median_filter, label
5
  from scipy.signal import convolve2d
 
6
  from typing import Dict, Any
 
7
  from agents.optical_agent import AgentEvidence
8
 
9
+ def _g(img): return np.array(img.convert("L")).astype(np.float64)
10
+ def _rgb(img): return np.array(img.convert("RGB")).astype(np.float64)
11
 
12
def s01_prnu_uniformity(img):
    """PRNU test: local noise-energy variance should be low for a real sensor.

    High spatial variation of the residual-noise energy suggests splicing or
    AI generation; a uniform map is camera-like.
    """
    pixels = np.array(img.convert("RGB")).astype(np.float64)
    residuals = []
    for band in range(3):
        plane = pixels[:, :, band]
        # High-pass residual: pixel minus heavily smoothed version.
        residuals.append(plane - gaussian_filter(plane, 3.0))
    energy = np.mean(np.stack(residuals, axis=-1) ** 2, axis=-1)
    local = uniform_filter(energy, 32)
    spread = float(np.std(local))
    centre = float(np.mean(local))
    u = 1.0 - min(spread / (centre + 1e-9), 1.0)
    if u > 0.7:
        s, n = -0.4, f"Uniform sensor noise (uniformity={u:.3f})"
    elif u < 0.4:
        s, n = 0.5, f"Inconsistent noise ({u:.3f}) — splicing/AI"
    else:
        s, n = 0.1, f"Moderate noise uniformity ({u:.3f})"
    return {"test": "PRNU Uniformity", "uniformity": round(u, 4),
            "score": s, "note": n, "noise_map": energy}
23
+
24
def s02_prnu_correlation(img):
    """PRNU test: real sensors leave correlated noise residuals across channels."""
    pixels = np.array(img.convert("RGB")).astype(np.float64)
    flat_res = []
    for band in range(3):
        plane = pixels[:, :, band]
        flat_res.append((plane - gaussian_filter(plane, 3.0)).ravel())
    # Subsample to at most ~100k samples for speed.
    stride = max(1, len(flat_res[0]) // 100000)
    rg = float(np.corrcoef(flat_res[0][::stride], flat_res[1][::stride])[0, 1])
    rb = float(np.corrcoef(flat_res[0][::stride], flat_res[2][::stride])[0, 1])
    avg = (rg + rb) / 2
    if avg > 0.3:
        s, n = -0.3, f"Correlated channel noise ({avg:.3f}) — real sensor"
    elif avg < 0.1:
        s, n = 0.4, f"Uncorrelated noise ({avg:.3f}) — AI-like"
    else:
        s, n = 0.1, f"Moderate noise correlation ({avg:.3f})"
    return {"test": "PRNU Cross-Channel", "correlation": round(avg, 4), "score": s, "note": n}
35
+
36
def s03_noise_model(img):
    """Fit the Poisson-Gaussian sensor model V = a*I + b over 16x16 blocks.

    Real sensor noise variance grows roughly linearly with intensity, so a
    good linear fit (high R²) is evidence of a physical sensor while a poor
    fit is AI-like. Returns a findings dict with "score" and "note".
    """
    rgb = np.array(img.convert("RGB")).astype(np.float64)
    gray = np.mean(rgb, axis=-1)
    h, w = gray.shape
    bs = 16
    hc, wc = (h // bs) * bs, (w // bs) * bs
    gray = gray[:hc, :wc]
    I, V = [], []
    for i in range(0, hc, bs):
        for j in range(0, wc, bs):
            blk = gray[i:i + bs, j:j + bs]
            I.append(float(np.mean(blk)))
            V.append(float(np.var(blk)))
    I, V = np.array(I), np.array(V)
    # Keep mid-tone, non-constant blocks only: clipped/flat blocks break the model.
    valid = (I > 10) & (I < 245) & (V > 0)
    if np.sum(valid) < 20:
        return {"test": "Poisson-Gaussian Model", "score": 0.0, "note": "Insufficient data"}
    try:
        coeffs = np.polyfit(I[valid], V[valid], 1)
        fitted = np.polyval(coeffs, I[valid])
        r2 = 1.0 - float(np.mean((V[valid] - fitted) ** 2)) / (np.var(V[valid]) + 1e-9)
    except Exception:  # was a bare `except:` — also swallowed SystemExit/KeyboardInterrupt
        r2 = 0.0
    if r2 > 0.5:
        s, n = -0.3, f"Poisson-Gaussian fit R²={r2:.3f}"
    elif r2 < 0.1:
        s, n = 0.5, f"No sensor noise model R²={r2:.3f}"
    else:
        s, n = 0.15, f"Weak fit R²={r2:.3f}"
    return {"test": "Poisson-Gaussian Model", "r_squared": round(r2, 4), "score": s, "note": n}
52
+
53
def s04_bayer(img):
    """Bayer check: green (sampled 2x in the CFA) should be the quietest channel."""
    pixels = np.array(img.convert("RGB")).astype(np.float64)
    sigma = {}
    for idx, name in enumerate(["red", "green", "blue"]):
        plane = pixels[:, :, idx]
        sigma[name] = float(np.std(plane - gaussian_filter(plane, 1.5)))
    green_lowest = sigma["green"] < min(sigma["red"], sigma["blue"])
    rb_close = abs(sigma["red"] - sigma["blue"]) / (max(sigma["red"], sigma["blue"]) + 1e-9) < 0.2
    if green_lowest and rb_close:
        s = -0.4
        n = f"Bayer: σG({sigma['green']:.3f})<σR({sigma['red']:.3f})≈σB({sigma['blue']:.3f})"
    elif green_lowest:
        s, n = -0.2, "Green quieter but R/B differ"
    else:
        s, n = 0.4, f"No Bayer pattern: σG={sigma['green']:.3f}"
    return {"test": "Bayer CFA Pattern", "score": s, "note": n}
63
+
64
def s05_cfa_nyquist(img):
    """Look for CFA interpolation peaks at Nyquist in the R-G difference spectrum."""
    pixels = np.array(img.convert("RGB")).astype(np.float64)
    diff = pixels[:, :, 0] - pixels[:, :, 1]
    spectrum = np.abs(np.fft.fftshift(np.fft.fft2(diff)))
    h, w = spectrum.shape
    cy, cx = h // 2, w // 2
    # Corners of the shifted spectrum correspond to the Nyquist frequencies.
    nyquist = float(spectrum[cy, 0] + spectrum[0, cx] + spectrum[0, 0]) / 3
    centre = float(np.mean(spectrum[cy - 5:cy + 5, cx - 5:cx + 5]))
    r = nyquist / (centre + 1e-9)
    if r > 1.5:
        s, n = -0.3, f"CFA traces (ratio={r:.2f})"
    elif r < 0.5:
        s, n = 0.3, f"No CFA traces ({r:.2f})"
    else:
        s, n = 0.0, f"CFA ratio={r:.2f}"
    return {"test": "CFA Nyquist", "ratio": round(r, 4), "score": s, "note": n}
73
+
74
def s06_hot_dead(img):
    """Count hot/dead pixels (isolated outliers vs. a 5x5 median).

    Uses an absolute floor on the outlier threshold: a pure percentile cut
    would always flag ~0.1% of pixels by construction, making the defect
    rate self-fulfilling and the rate bands below meaningless.
    """
    gray = np.array(img.convert("L")).astype(np.float64)
    h, w = gray.shape
    med = median_filter(gray, 5)
    diff = np.abs(gray - med)
    # Floor of 16 gray levels: small smooth deviations are not sensor defects.
    thr = max(float(np.percentile(diff, 99.9)), 16.0)
    hot = int(np.sum(diff > thr))
    dead = int(np.sum((gray < 5) & (diff > 0.5 * thr)))
    rate = (hot + dead) / (h * w)
    if 0.00001 < rate < 0.001:
        s, n = -0.2, f"Sensor defects ({hot + dead}px, rate={rate:.6f})"
    elif rate < 0.000001:
        s, n = 0.2, "No defects — possible AI"
    else:
        s, n = 0.0, f"Defect rate={rate:.6f}"
    return {"test": "Hot/Dead Pixels", "count": hot + dead, "score": s, "note": n}
83
+
84
def s07_fixed_pattern(img):
    """Row/column banding (fixed-pattern noise) typical of real sensor readout."""
    pixels = np.array(img.convert("RGB")).astype(np.float64)
    gray = np.mean(pixels, axis=-1)
    rows = np.mean(gray, axis=1)
    cols = np.mean(gray, axis=0)
    # Variance of the high-pass row/column profiles measures periodic banding.
    band_rows = float(np.var(rows - gaussian_filter(rows, 10)))
    band_cols = float(np.var(cols - gaussian_filter(cols, 10)))
    fpn = band_rows + band_cols
    if fpn > 5:
        s, n = -0.2, f"Fixed pattern noise ({fpn:.2f}) — sensor"
    elif fpn < 0.5:
        s, n = 0.2, f"No fixed pattern ({fpn:.2f})"
    else:
        s, n = 0.0, f"FPN={fpn:.2f}"
    return {"test": "Fixed Pattern Noise", "fpn": round(fpn, 4), "score": s, "note": n}
94
+
95
def s08_dark_current(img):
    """Near-black pixels of a real sensor show dark-current variation."""
    gray = np.array(img.convert("L")).astype(np.float64)
    shadows = gray[gray < 10]
    if len(shadows) < 100:
        return {"test": "Dark Current", "score": 0.0, "note": "No dark pixels"}
    spread = float(np.std(shadows))
    if spread > 1:
        s, n = -0.2, f"Dark current variation (σ={spread:.2f}) — sensor"
    elif spread < 0.3:
        s, n = 0.1, f"Flat dark pixels (σ={spread:.2f})"
    else:
        s, n = 0.0, f"Dark σ={spread:.2f}"
    return {"test": "Dark Current", "dark_std": round(spread, 3), "score": s, "note": n}
103
+
104
def s09_read_noise(img):
    """Estimate the read-noise floor from mid-tone (flat-ish) pixels.

    The residual comes from a 2-D high-pass of the image, sampled where the
    intensity lies in [100, 150]. The previous version Gaussian-filtered a
    1-D list of scattered masked pixels, which smooths across unrelated
    image locations and is not a noise estimate.
    """
    rgb = np.array(img.convert("RGB")).astype(np.float64)
    gray = np.mean(rgb, axis=-1)
    mask = (gray > 100) & (gray < 150)
    if int(np.sum(mask)) < 1000:
        return {"test": "Read Noise Floor", "score": 0.0, "note": "No flat regions"}
    residual = gray - gaussian_filter(gray, 1)
    rn = float(np.std(residual[mask]))
    if 0.5 < rn < 5:
        s, n = -0.2, f"Read noise={rn:.2f} — real sensor"
    elif rn < 0.2:
        s, n = 0.3, f"No read noise ({rn:.2f})"
    else:
        s, n = 0.0, f"Read noise={rn:.2f}"
    return {"test": "Read Noise Floor", "read_noise": round(rn, 3), "score": s, "note": n}
113
+
114
def s10_pixel_nonlinearity(img):
    """Histogram-smoothness proxy for sensor tonal-response linearity."""
    gray = np.array(img.convert("L")).astype(np.float64)
    edges = np.linspace(0, 255, 20)
    counts, _ = np.histogram(gray, bins=edges)
    counts = counts.astype(np.float64)
    # Deviation of the histogram from its smoothed version flags gaps/spikes.
    smoothed = gaussian_filter(counts, 2)
    rough = float(np.mean(np.abs(counts - smoothed)) / (np.mean(counts) + 1e-9))
    if rough < 0.1:
        s, n = -0.2, f"Smooth tonal response ({rough:.3f})"
    elif rough > 0.3:
        s, n = 0.3, f"Non-linear tonality ({rough:.3f})"
    else:
        s, n = 0.0, f"Tonal linearity={rough:.3f}"
    return {"test": "Pixel Response Linearity", "nonlinearity": round(rough, 4), "score": s, "note": n}
124
+
125
def s11_color_matrix(img):
    """Natural photos have correlated but not identical RGB channels."""
    pixels = np.array(img.convert("RGB")).astype(np.float64)
    # Subsample every 100th pixel for speed.
    red = pixels[:, :, 0].ravel()[::100]
    green = pixels[:, :, 1].ravel()[::100]
    blue = pixels[:, :, 2].ravel()[::100]
    rg = float(np.corrcoef(red, green)[0, 1])
    rb = float(np.corrcoef(red, blue)[0, 1])
    gb = float(np.corrcoef(green, blue)[0, 1])
    avg = (rg + rb + gb) / 3
    if 0.5 < avg < 0.95:
        s, n = -0.2, f"Natural color matrix (avg_corr={avg:.3f})"
    elif avg > 0.98:
        s, n = 0.2, f"Identical channels ({avg:.3f})"
    else:
        s, n = 0.0, f"Color correlation={avg:.3f}"
    return {"test": "Color Matrix Verify", "avg_corr": round(avg, 4), "score": s, "note": n}
135
+
136
def s12_quantization(img):
    """Inspect JPEG quantization tables (camera firmware vs. generic encoders).

    Non-JPEG images simply have no tables (score 0.1) instead of being
    mis-reported as a read error; any unexpected failure while parsing the
    tables is still caught. The previous bare `except:` also swallowed
    SystemExit/KeyboardInterrupt.
    """
    try:
        qtables = getattr(img, "quantization", None)
        if qtables:
            first = list(qtables.values())[0]
            # Pillow exposes tables as dicts in old versions, sequences in new ones.
            t = list(first.values()) if isinstance(first, dict) else list(first)
            if len(t) == 64:
                mx, mn = int(np.max(t)), int(np.min(t))
                std = float(np.std(t))
                if std < 5:
                    s, n = 0.2, f"Uniform quantization (std={std:.1f})"
                elif mx > 100:
                    s, n = -0.2, f"Camera quantization (max={mx})"
                else:
                    s, n = -0.1, f"Standard JPEG table (range=[{mn},{mx}])"
            else:
                s, n = 0.0, "Non-standard table"
        else:
            s, n = 0.1, "No JPEG tables"
    except Exception:
        s, n = 0.0, "Cannot read tables"
    return {"test": "JPEG Quantization", "score": s, "note": n}
150
+
151
def s13_bit_depth(img):
    """Count distinct gray levels: real captures fill most of the 8-bit range."""
    gray = np.array(img.convert("L")).astype(np.float64)
    levels = len(np.unique(gray.astype(int)))
    frac = levels / 256
    if frac > 0.95:
        s, n = -0.2, f"Full 8-bit usage ({levels} levels)"
    elif frac < 0.5:
        s, n = 0.3, f"Limited tonal range ({levels} levels)"
    else:
        s, n = 0.0, f"{levels} unique levels"
    return {"test": "Bit Depth Usage", "unique_levels": levels, "score": s, "note": n}
158
+
159
def s14_saturation_clipping(img):
    """Real exposures usually clip a small fraction of pixels at black/white."""
    gray = np.array(img.convert("L")).astype(np.float64)
    white = float(np.mean(gray > 254))
    black = float(np.mean(gray < 1))
    total = white + black
    if 0.001 < total < 0.05:
        s, n = -0.2, f"Natural clipping ({total:.3%})"
    elif total < 0.0001:
        s, n = 0.2, f"No clipping ({total:.5%}) — unusual"
    elif total > 0.1:
        s, n = 0.1, f"Heavy clipping ({total:.1%})"
    else:
        s, n = 0.0, f"Clipping={total:.3%}"
    return {"test": "Saturation Clipping", "clip_fraction": round(total, 5), "score": s, "note": n}
167
+
168
def s15_noise_spatial_freq(img):
    """Compare high- vs low-frequency energy of the noise residual.

    Sensor noise is high-frequency dominant. The high-frequency figure here
    is the mean over the spectrum OUTSIDE the central low-frequency block;
    the previous version subtracted the low-frequency mean from the
    whole-spectrum mean, mixing averages over different-sized regions
    (and could go negative).
    """
    rgb = np.array(img.convert("RGB")).astype(np.float64)
    gray = np.mean(rgb, axis=-1)
    noise = gray - gaussian_filter(gray, 2)
    spectrum = np.abs(np.fft.fftshift(np.fft.fft2(noise)))
    h, w = spectrum.shape
    cy, cx = h // 2, w // 2
    low_block = spectrum[cy - h // 8:cy + h // 8, cx - w // 8:cx + w // 8]
    lf = float(np.mean(low_block))
    hf_count = spectrum.size - low_block.size
    hf = (float(np.sum(spectrum)) - float(np.sum(low_block))) / max(hf_count, 1)
    ratio = hf / (lf + 1e-9)
    if ratio > 1.5:
        s, n = -0.2, f"High-freq noise dominant ({ratio:.2f}) — sensor"
    elif ratio < 0.5:
        s, n = 0.3, f"Low-freq noise ({ratio:.2f}) — unusual"
    else:
        s, n = 0.0, f"Noise freq ratio={ratio:.2f}"
    return {"test": "Noise Spatial Frequency", "ratio": round(ratio, 3), "score": s, "note": n}
179
+
180
def s16_green_imbalance(img):
    """Compare the two green sub-lattices of a Bayer mosaic (G1 vs G2)."""
    pixels = np.array(img.convert("RGB")).astype(np.float64)
    green = pixels[:, :, 1]
    g1 = green[0::2, 0::2]
    g2 = green[1::2, 1::2]
    rows = min(g1.shape[0], g2.shape[0])
    cols = min(g1.shape[1], g2.shape[1])
    # NOTE(review): g1/g2 sit at different spatial positions, so scene content
    # contributes to this difference as well as any sensor imbalance.
    delta = float(np.mean(np.abs(g1[:rows, :cols] - g2[:rows, :cols])))
    if delta > 0.5:
        s, n = -0.2, f"Green channel imbalance ({delta:.3f}) — Bayer"
    elif delta < 0.1:
        s, n = 0.2, f"Identical green subpixels ({delta:.3f})"
    else:
        s, n = 0.0, f"Green diff={delta:.3f}"
    return {"test": "Green Pixel Imbalance", "diff": round(delta, 4), "score": s, "note": n}
189
+
190
def s17_sensor_crop_factor(img):
    """Compare the image aspect ratio against common sensor/film ratios."""
    w, h = img.size
    aspect = max(w, h) / min(w, h)
    standards = (1.0, 4 / 3, 3 / 2, 16 / 9, 1.85, 2.35, 2.39)
    closest = min(abs(aspect - std) for std in standards)
    if closest < 0.02:
        s, n = -0.1, f"Standard aspect ratio ({aspect:.3f})"
    elif closest > 0.1:
        s, n = 0.2, f"Unusual aspect ratio ({aspect:.3f})"
    else:
        s, n = 0.0, f"Aspect ratio={aspect:.3f}"
    return {"test": "Sensor Aspect Ratio", "ratio": round(aspect, 4), "score": s, "note": n}
199
+
200
def s18_demosaic_interpolation(img):
    """Demosaicing leaves green more horizontally correlated than red (2x sampling)."""
    pixels = np.array(img.convert("RGB")).astype(np.float64)
    red = pixels[:, :, 0]
    green = pixels[:, :, 1]
    # Correlate each pixel with its right-hand neighbour (subsampled pairs).
    r_corr = float(np.corrcoef(red[:, :-1].ravel()[::100], red[:, 1:].ravel()[::100])[0, 1])
    g_corr = float(np.corrcoef(green[:, :-1].ravel()[::100], green[:, 1:].ravel()[::100])[0, 1])
    if g_corr > r_corr + 0.005:
        s, n = -0.3, f"Demosaic pattern (G_corr={g_corr:.4f}>R_corr={r_corr:.4f})"
    elif abs(g_corr - r_corr) < 0.001:
        s, n = 0.2, "No demosaic signature"
    else:
        s, n = 0.0, f"G_corr={g_corr:.4f}, R_corr={r_corr:.4f}"
    return {"test": "Demosaic Interpolation", "g_corr": round(g_corr, 4),
            "r_corr": round(r_corr, 4), "score": s, "note": n}
212
+
213
# Registry of the 18 sensor-physics tests, executed in order by run_sensor_agent.
ALL_TESTS=[s01_prnu_uniformity,s02_prnu_correlation,s03_noise_model,s04_bayer,s05_cfa_nyquist,
    s06_hot_dead,s07_fixed_pattern,s08_dark_current,s09_read_noise,s10_pixel_nonlinearity,
    s11_color_matrix,s12_quantization,s13_bit_depth,s14_saturation_clipping,
    s15_noise_spatial_freq,s16_green_imbalance,s17_sensor_crop_factor,s18_demosaic_interpolation]
217
+
218
def run_sensor_agent(img):
    """Run every sensor test on *img* and fold the results into one AgentEvidence."""
    findings = []
    scores = []
    for test in ALL_TESTS:
        try:
            outcome = test(img)
            findings.append(outcome)
            scores.append(outcome["score"])
        except Exception as exc:
            # A failing test contributes a neutral score but is recorded.
            findings.append({"test": test.__name__, "error": str(exc), "score": 0})
    avg = float(np.mean(scores)) if scores else 0.0
    conf = min(1.0, 0.5 + 0.5 * abs(avg))
    viol = [f["test"] for f in findings if f.get("score", 0) > 0.2]
    comp = [f["test"] for f in findings if f.get("score", 0) < -0.1]
    if viol:
        rat = f"Sensor violations: {', '.join(viol)}."
    elif comp:
        rat = f"Sensor consistent: {', '.join(comp)}."
    else:
        rat = "Sensor inconclusive."
    for f in findings:
        if f.get("note"):
            rat += f" [{f['test']}]: {f['note']}."
    return AgentEvidence("Sensor Characteristics Agent", np.clip(avg, -1, 1), conf,
                         max(0, 1 - len(scores) / len(ALL_TESTS)), rat, findings)