| """FORENSIQ β Sensor Characteristics Agent (18 features)""" |
| import numpy as np |
| from PIL import Image |
| from scipy.ndimage import gaussian_filter, uniform_filter, median_filter, label |
| from scipy.signal import convolve2d |
| from typing import Dict, Any |
| from agents.optical_agent import AgentEvidence |
|
|
def _g(img):
    """Grayscale ("L") pixel data of *img* as a float64 array."""
    gray = np.array(img.convert("L"))
    return gray.astype(np.float64)
def _rgb(img):
    """RGB pixel data of *img* as an (H, W, 3) float64 array."""
    arr = np.array(img.convert("RGB"))
    return arr.astype(np.float64)
|
|
def s01_prnu_uniformity(img):
    """PRNU-style check: sensor noise energy should be spatially uniform.

    Splices and generated content tend to have patchy noise energy, so low
    uniformity pushes the score toward "fake".
    """
    rgb = _rgb(img)
    # High-pass residual per channel (original minus a Gaussian-smoothed copy).
    residuals = [rgb[:, :, c] - gaussian_filter(rgb[:, :, c], 3.0) for c in range(3)]
    ne = np.mean(np.stack(residuals, axis=-1) ** 2, axis=-1)  # per-pixel noise energy
    lv = uniform_filter(ne, 32)  # 32x32 box-averaged local energy map
    mn = float(np.mean(lv))
    std = float(np.std(lv))
    u = 1.0 - min(std / (mn + 1e-9), 1.0)  # 1.0 = perfectly uniform energy
    if u > 0.7:
        s, n = -0.4, f"Spatially consistent noise pattern (uniformity={u:.3f}) β authentic sensor"
    elif u < 0.4:
        s, n = 0.5, f"Inconsistent noise ({u:.3f}) β splicing/AI"
    else:
        s, n = 0.1, f"Moderate noise uniformity ({u:.3f})"
    return {"test": "PRNU Uniformity", "uniformity": round(u, 4), "score": s, "note": n, "noise_map": ne}
|
|
def s02_prnu_correlation(img):
    """Cross-channel noise correlation: a real sensor's PRNU leaks into all channels."""
    rgb = _rgb(img)
    residuals = []
    for c in range(3):
        ch = rgb[:, :, c]
        residuals.append((ch - gaussian_filter(ch, 3.0)).ravel())
    # Subsample so the correlation stays cheap on large images (~100k samples).
    step = max(1, len(residuals[0]) // 100000)
    red = residuals[0][::step]
    rg = float(np.corrcoef(red, residuals[1][::step])[0, 1])
    rb = float(np.corrcoef(red, residuals[2][::step])[0, 1])
    avg = (rg + rb) / 2
    if avg > 0.3:
        s, n = -0.3, f"Correlated channel noise ({avg:.3f}) β real sensor"
    elif avg < 0.1:
        s, n = 0.4, f"Uncorrelated noise ({avg:.3f}) β AI-like"
    else:
        s, n = 0.1, f"Moderate noise correlation ({avg:.3f})"
    return {"test": "PRNU Cross-Channel", "correlation": round(avg, 4), "score": s, "note": n}
|
|
def s03_noise_model(img):
    """Fit the Poisson-Gaussian sensor model: block variance grows linearly
    with block mean intensity on real sensor data.

    Returns a finding dict with an R-squared for the linear variance-vs-mean
    fit; a good fit argues for a physical sensor.
    """
    rgb = _rgb(img)
    gray = np.mean(rgb, axis=-1)
    h, w = gray.shape
    bs = 16  # block size for the local mean/variance statistics
    hc, wc = (h // bs) * bs, (w // bs) * bs  # crop to a whole number of blocks
    gray = gray[:hc, :wc]
    I, V = [], []
    for i in range(0, hc, bs):
        for j in range(0, wc, bs):
            b = gray[i:i + bs, j:j + bs]
            I.append(float(np.mean(b)))
            V.append(float(np.var(b)))
    I, V = np.array(I), np.array(V)
    # Exclude near-clipped blocks (shadows/highlights) and constant blocks.
    v = (I > 10) & (I < 245) & (V > 0)
    if np.sum(v) < 20:
        return {"test": "Poisson-Gaussian Model", "score": 0.0, "note": "Insufficient data"}
    try:
        c = np.polyfit(I[v], V[v], 1)
        f = np.polyval(c, I[v])
        r2 = 1.0 - float(np.mean((V[v] - f) ** 2)) / (np.var(V[v]) + 1e-9)
    except Exception:  # narrowed from bare except: never swallow KeyboardInterrupt/SystemExit
        r2 = 0.0
    if r2 > 0.5:
        s, n = -0.3, f"Poisson-Gaussian fit RΒ²={r2:.3f}"
    elif r2 < 0.1:
        s, n = 0.5, f"No sensor noise model RΒ²={r2:.3f}"
    else:
        s, n = 0.15, f"Weak fit RΒ²={r2:.3f}"
    return {"test": "Poisson-Gaussian Model", "r_squared": round(r2, 4), "score": s, "note": n}
|
|
def s04_bayer(img):
    """Bayer CFA signature: green (2x sample sites) should be the quietest
    channel while red and blue noise levels roughly match."""
    rgb = _rgb(img)
    ns = {}
    for c, nm in enumerate(["red", "green", "blue"]):
        ch = rgb[:, :, c]
        ns[nm] = float(np.std(ch - gaussian_filter(ch, 1.5)))  # high-pass noise level
    green_lowest = ns["green"] < min(ns["red"], ns["blue"])
    rb_balanced = abs(ns["red"] - ns["blue"]) / (max(ns["red"], ns["blue"]) + 1e-9) < 0.2
    if green_lowest and rb_balanced:
        s, n = -0.4, f"Bayer: ΟG({ns['green']:.3f})<ΟR({ns['red']:.3f})βΟB({ns['blue']:.3f})"
    elif green_lowest:
        s, n = -0.2, "Green quieter but R/B differ"
    else:
        s, n = 0.4, f"No Bayer pattern: ΟG={ns['green']:.3f}"
    return {"test": "Bayer CFA Pattern", "score": s, "note": n}
|
|
def s05_cfa_nyquist(img):
    """CFA demosaicing concentrates R-G difference energy near the Nyquist
    frequency; compare Nyquist-bin magnitude to the low-frequency centre."""
    rgb = _rgb(img)
    rg_diff = rgb[:, :, 0] - rgb[:, :, 1]
    spectrum = np.abs(np.fft.fftshift(np.fft.fft2(rg_diff)))
    h, w = spectrum.shape
    cy, cx = h // 2, w // 2
    # After fftshift the Nyquist bins sit at the array edges/corners.
    nyq = float(spectrum[cy, 0] + spectrum[0, cx] + spectrum[0, 0]) / 3
    cen = float(np.mean(spectrum[cy - 5:cy + 5, cx - 5:cx + 5]))  # DC neighbourhood
    r = nyq / (cen + 1e-9)
    if r > 1.5:
        s, n = -0.3, f"CFA traces (ratio={r:.2f})"
    elif r < 0.5:
        s, n = 0.3, f"No CFA traces ({r:.2f})"
    else:
        s, n = 0.0, f"CFA ratio={r:.2f}"
    return {"test": "CFA Nyquist", "ratio": round(r, 4), "score": s, "note": n}
|
|
def s06_hot_dead(img):
    """Count isolated hot/dead pixels (strong outliers vs a 5x5 median),
    a physical-sensor signature that generators rarely reproduce."""
    gray = _g(img)
    h, w = gray.shape
    deviation = np.abs(gray - median_filter(gray, 5))
    hot = int(np.sum(deviation > np.percentile(deviation, 99.9)))
    dead = int(np.sum((gray < 5) & (deviation > np.percentile(deviation, 99.5))))
    rate = (hot + dead) / (h * w)

    # JPEG processing removes hot pixels, so "no defects" is only meaningful
    # for non-JPEG sources.
    is_jpeg = img.format == "JPEG" if hasattr(img, 'format') and img.format else False
    if 0.00001 < rate < 0.001:
        s, n = -0.2, f"Sensor defects ({hot+dead}px, rate={rate:.6f})"
    elif rate < 0.000001 and not is_jpeg:
        s, n = 0.15, f"No defects, non-JPEG β mild AI indicator"
    elif rate < 0.000001 and is_jpeg:
        s, n = 0.0, f"No defects (JPEG strips hot pixels β inconclusive)"
    else:
        s, n = 0.0, f"Defect rate={rate:.6f}"
    return {"test": "Hot/Dead Pixels", "count": hot + dead, "score": s, "note": n}
|
|
def s07_fixed_pattern(img):
    """Row/column fixed-pattern noise (readout banding) from sensor electronics."""
    gray = np.mean(_rgb(img), axis=-1)
    row_means = np.mean(gray, axis=1)
    col_means = np.mean(gray, axis=0)
    # High-pass the mean profiles so slow illumination gradients don't count.
    row_var = float(np.var(row_means - gaussian_filter(row_means, 10)))
    col_var = float(np.var(col_means - gaussian_filter(col_means, 10)))
    fpn = row_var + col_var
    if fpn > 5:
        s, n = -0.2, f"Fixed pattern noise ({fpn:.2f}) β sensor"
    elif fpn < 0.5:
        s, n = 0.2, f"No fixed pattern ({fpn:.2f})"
    else:
        s, n = 0.0, f"FPN={fpn:.2f}"
    return {"test": "Fixed Pattern Noise", "fpn": round(fpn, 4), "score": s, "note": n}
|
|
def s08_dark_current(img):
    """Dark-current check: near-black pixels of a real sensor still vary."""
    gray = _g(img)
    dark = gray[gray < 10]  # near-black population
    if len(dark) < 100:
        return {"test": "Dark Current", "score": 0.0, "note": "No dark pixels"}
    dk_std = float(np.std(dark))
    if dk_std > 1:
        s, n = -0.2, f"Dark current variation (Ο={dk_std:.2f}) β sensor"
    elif dk_std < 0.3:
        s, n = 0.1, f"Flat dark pixels (Ο={dk_std:.2f})"
    else:
        s, n = 0.0, f"Dark Ο={dk_std:.2f}"
    return {"test": "Dark Current", "dark_std": round(dk_std, 3), "score": s, "note": n}
|
|
def s09_read_noise(img):
    """Estimate the read-noise floor inside mid-tone (flat-ish) regions."""
    gray = np.mean(_rgb(img), axis=-1)

    flat_mask = (gray > 100) & (gray < 150)  # mid-tone pixels only
    if np.sum(flat_mask) < 1000:
        return {"test": "Read Noise Floor", "score": 0.0, "note": "No flat regions"}

    residual = gray - gaussian_filter(gray, sigma=2.0)
    rn = float(np.std(residual[flat_mask]))
    if 0.5 < rn < 5:
        s, n = -0.2, f"Read noise={rn:.2f} β real sensor"
    elif rn < 0.2:
        s, n = 0.3, f"No read noise ({rn:.2f})"
    else:
        s, n = 0.0, f"Read noise={rn:.2f}"
    return {"test": "Read Noise Floor", "read_noise": round(rn, 3), "score": s, "note": n}
|
|
def s10_pixel_nonlinearity(img):
    """Roughness of the 20-bin luminance histogram relative to a smoothed copy."""
    gray = _g(img)
    hist, _ = np.histogram(gray, bins=np.linspace(0, 255, 20))
    hist = hist.astype(float)

    smooth = gaussian_filter(hist, 2)
    nonlin = float(np.mean(np.abs(hist - smooth)) / (np.mean(hist) + 1e-9))
    if nonlin < 0.1:
        s, n = -0.2, f"Smooth tonal response ({nonlin:.3f})"
    elif nonlin > 0.3:
        s, n = 0.3, f"Non-linear tonality ({nonlin:.3f})"
    else:
        s, n = 0.0, f"Tonal linearity={nonlin:.3f}"
    return {"test": "Pixel Response Linearity", "nonlinearity": round(nonlin, 4), "score": s, "note": n}
|
|
def s11_color_matrix(img):
    """Average pairwise channel correlation: natural photos are correlated
    (shared luminance) but not identical across channels."""
    rgb = _rgb(img)
    # Subsample each flattened channel 100x to keep corrcoef cheap.
    chans = [rgb[:, :, c].ravel()[::100] for c in range(3)]
    rg = float(np.corrcoef(chans[0], chans[1])[0, 1])
    rb = float(np.corrcoef(chans[0], chans[2])[0, 1])
    gb = float(np.corrcoef(chans[1], chans[2])[0, 1])
    avg = (rg + rb + gb) / 3
    if 0.5 < avg < 0.95:
        s, n = -0.2, f"Natural color matrix (avg_corr={avg:.3f})"
    elif avg > 0.98:
        s, n = 0.2, f"Identical channels ({avg:.3f})"
    else:
        s, n = 0.0, f"Color correlation={avg:.3f}"
    return {"test": "Color Matrix Verify", "avg_corr": round(avg, 4), "score": s, "note": n}
|
|
def s12_quantization(img):
    """Inspect JPEG quantization tables attached to *img* (Pillow exposes
    them as `JpegImageFile.quantization`).

    Fixes two defects of the original: a bare `except:` (which would swallow
    KeyboardInterrupt/SystemExit), and non-JPEG images — which simply lack the
    `quantization` attribute — being misreported as "Cannot read tables"
    instead of "No JPEG tables".
    """
    try:
        # Non-JPEG PIL images have no `quantization` attribute at all.
        qt = getattr(img, "quantization", None)
        if qt:
            first = list(qt.values())[0]
            # Older Pillow returns dicts keyed by coefficient index; newer
            # versions return flat sequences of 64 entries.
            t = list(first.values()) if isinstance(first, dict) else list(first)
            if len(t) == 64:
                mx, mn = int(np.max(t)), int(np.min(t))
                std = float(np.std(t))
                if std < 5:
                    s, n = 0.2, f"Uniform quantization (std={std:.1f})"
                elif mx > 100:
                    s, n = -0.2, f"Camera quantization (max={mx})"
                else:
                    s, n = -0.1, f"Standard JPEG table (range=[{mn},{mx}])"
            else:
                s, n = 0.0, "Non-standard table"
        else:
            s, n = 0.1, "No JPEG tables"
    except Exception:  # malformed table data only; was a bare except
        s, n = 0.0, "Cannot read tables"
    return {"test": "JPEG Quantization", "score": s, "note": n}
|
|
def s13_bit_depth(img):
    """Count distinct grey levels; camera output normally spans most of 8 bits."""
    levels = len(np.unique(_g(img).astype(int)))
    ratio = levels / 256
    if ratio > 0.95:
        s, n = -0.2, f"Full 8-bit usage ({levels} levels)"
    elif ratio < 0.5:
        s, n = 0.3, f"Limited tonal range ({levels} levels)"
    else:
        s, n = 0.0, f"{levels} unique levels"
    return {"test": "Bit Depth Usage", "unique_levels": levels, "score": s, "note": n}
|
|
def s14_saturation_clipping(img):
    """Fraction of pixels clipped at pure black/white; real exposures
    usually clip a small amount."""
    gray = _g(img)
    clip_white = float(np.mean(gray > 254))
    clip_black = float(np.mean(gray < 1))
    total = clip_white + clip_black
    if 0.001 < total < 0.05:
        s, n = -0.2, f"Natural clipping ({total:.3%})"
    elif total < 0.0001:
        s, n = 0.2, f"No clipping ({total:.5%}) β unusual"
    elif total > 0.1:
        s, n = 0.1, f"Heavy clipping ({total:.1%})"
    else:
        s, n = 0.0, f"Clipping={total:.3%}"
    return {"test": "Saturation Clipping", "clip_fraction": round(total, 5), "score": s, "note": n}
|
|
def s15_noise_spatial_freq(img):
    """Spectral balance of the noise residual: sensor noise is high-frequency
    dominant, while synthetic noise often carries low-frequency structure."""
    gray = np.mean(_rgb(img), axis=-1)
    noise = gray - gaussian_filter(gray, 2)
    spectrum = np.abs(np.fft.fftshift(np.fft.fft2(noise)))
    h, w = spectrum.shape
    cy, cx = h // 2, w // 2
    lf = float(np.mean(spectrum[cy - h // 8:cy + h // 8, cx - w // 8:cx + w // 8]))
    # Rough HF proxy: whole-spectrum mean minus the low-frequency mean.
    hf = float(np.mean(spectrum)) - lf
    ratio = hf / (lf + 1e-9)
    if ratio > 1.5:
        s, n = -0.2, f"High-freq noise dominant ({ratio:.2f}) β sensor"
    elif ratio < 0.5:
        s, n = 0.3, f"Low-freq noise ({ratio:.2f}) β unusual"
    else:
        s, n = 0.0, f"Noise freq ratio={ratio:.2f}"
    return {"test": "Noise Spatial Frequency", "ratio": round(ratio, 3), "score": s, "note": n}
|
|
def s16_green_imbalance(img):
    """Compare the two green Bayer sub-lattices; real demosaicing leaves a
    small but non-zero mismatch between them."""
    g = _rgb(img)[:, :, 1]
    g1 = g[0::2, 0::2]  # green sites on even rows/cols
    g2 = g[1::2, 1::2]  # green sites on odd rows/cols
    mh = min(g1.shape[0], g2.shape[0])
    mw = min(g1.shape[1], g2.shape[1])
    diff = float(np.mean(np.abs(g1[:mh, :mw] - g2[:mh, :mw])))
    if diff > 0.5:
        s, n = -0.2, f"Green channel imbalance ({diff:.3f}) β Bayer"
    elif diff < 0.1:
        s, n = 0.2, f"Identical green subpixels ({diff:.3f})"
    else:
        s, n = 0.0, f"Green diff={diff:.3f}"
    return {"test": "Green Pixel Imbalance", "diff": round(diff, 4), "score": s, "note": n}
|
|
def s17_noise_autocorrelation(img):
    """Real sensor noise has spatial correlation from the anti-aliasing filter.
    AI noise is typically white (uncorrelated) or has non-physical correlation."""
    noise = _g(img) - gaussian_filter(_g(img), 2.0)
    h, w = noise.shape
    if h < 20 or w < 20:
        return {"test": "Noise Autocorrelation", "score": 0.0, "note": "Too small"}
    step = max(1, h * w // 200000)  # subsample to bound work on large images
    # Lag-1 and lag-2 horizontal autocorrelation of the residual.
    ac1 = float(np.corrcoef(noise[:, :-1].ravel()[::step], noise[:, 1:].ravel()[::step])[0, 1])
    ac2 = float(np.corrcoef(noise[:, :-2].ravel()[::step], noise[:, 2:].ravel()[::step])[0, 1])
    decay = ac1 - ac2
    if ac1 > 0.05 and decay > 0.02:
        s, n = -0.3, f"AA-filtered noise (ac1={ac1:.3f}, decay={decay:.3f}) β real sensor"
    elif ac1 < 0.01:
        s, n = 0.2, f"White noise (ac1={ac1:.3f}) β no AA filter"
    else:
        s, n = 0.0, f"Noise ac1={ac1:.3f}, decay={decay:.3f}"
    return {"test": "Noise Autocorrelation", "ac1": round(ac1, 4), "decay": round(decay, 4), "score": s, "note": n}
|
|
def s18_demosaic_interpolation(img):
    """Demosaic signature: green is interpolated from twice as many CFA
    samples, so its adjacent-pixel correlation should exceed red's."""
    rgb = _rgb(img)

    def _h_corr(ch):
        # Correlation between horizontally adjacent pixels, subsampled 100x.
        return float(np.corrcoef(ch[:, :-1].ravel()[::100], ch[:, 1:].ravel()[::100])[0, 1])

    r_h_corr = _h_corr(rgb[:, :, 0])
    g_h_corr = _h_corr(rgb[:, :, 1])

    if g_h_corr > r_h_corr + 0.005:
        s, n = -0.3, f"Demosaic pattern (G_corr={g_h_corr:.4f}>R_corr={r_h_corr:.4f})"
    elif abs(g_h_corr - r_h_corr) < 0.001:
        s, n = 0.2, f"No demosaic signature"
    else:
        s, n = 0.0, f"G_corr={g_h_corr:.4f}, R_corr={r_h_corr:.4f}"
    return {"test": "Demosaic Interpolation", "g_corr": round(g_h_corr, 4), "r_corr": round(r_h_corr, 4), "score": s, "note": n}
|
|
# Registry of the 18 sensor-characteristic tests, in execution order.
# Each entry takes a PIL image and returns a finding dict with at least
# "test", "score" and "note" keys.
ALL_TESTS=[s01_prnu_uniformity,s02_prnu_correlation,s03_noise_model,s04_bayer,s05_cfa_nyquist,
           s06_hot_dead,s07_fixed_pattern,s08_dark_current,s09_read_noise,s10_pixel_nonlinearity,
           s11_color_matrix,s12_quantization,s13_bit_depth,s14_saturation_clipping,
           s15_noise_spatial_freq,s16_green_imbalance,s17_noise_autocorrelation,s18_demosaic_interpolation]
|
|
def run_sensor_agent(img, modality_adjustments=None):
    """Run all 18 sensor-characteristic tests on *img* and bundle the results.

    img: PIL image, passed through to each test in ALL_TESTS.
    modality_adjustments: optional adjustments forwarded to run_agent_tests;
        semantics are defined in agents.utils (not visible in this file).
    Returns an AgentEvidence whose aggregate score is clipped to [-1, 1].
    """
    # Imported here (not at module top) — presumably to avoid a circular
    # import with agents.utils; TODO confirm.
    from agents.utils import run_agent_tests
    findings, avg, conf, fail, rat = run_agent_tests(ALL_TESTS, img, "Sensor Characteristics Agent", modality_adjustments)
    return AgentEvidence("Sensor Characteristics Agent",np.clip(avg,-1,1),conf,fail,rat,findings)
|
|