anky2002 committed on
Commit
23e2106
Β·
verified Β·
1 Parent(s): 09803bf

Upload agents/model_agent.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. agents/model_agent.py +245 -353
agents/model_agent.py CHANGED
@@ -1,360 +1,252 @@
1
- """
2
- FORENSIQ β€” Generative Model Agent
3
- Detects architecture-specific signatures:
4
- - Frequency grid artifacts (8Γ—8, 16Γ—16 periodic patterns from GANs)
5
- - Diffusion residuals (spectral notches at step noise harmonics)
6
- - Model fingerprinting (frequency-domain generator attribution)
7
- """
8
-
9
  import numpy as np
10
  from PIL import Image
11
  from scipy.signal import find_peaks
12
- from scipy.ndimage import gaussian_filter
13
  from typing import Dict, Any
14
-
15
  from agents.optical_agent import AgentEvidence
16
 
17
-
18
- # ─── Frequency Grid Artifacts ────────────────────────────────────────
19
- def analyze_frequency_grid(img: Image.Image) -> Dict[str, Any]:
20
- """
21
- GAN upsampling (deconv layers) creates periodic grid patterns
22
- visible in 2D FFT as spectral peaks at multiples of 8Γ—8 or 16Γ—16.
23
- """
24
- gray = np.array(img.convert("L")).astype(np.float64)
25
- h, w = gray.shape
26
-
27
- # 2D FFT
28
- fft = np.fft.fft2(gray)
29
- fft_shift = np.fft.fftshift(fft)
30
- magnitude = np.log(np.abs(fft_shift) + 1)
31
-
32
- # Center row/column profiles
33
- cy, cx = h // 2, w // 2
34
- row_profile = magnitude[cy, :]
35
- col_profile = magnitude[:, cx]
36
-
37
- # Find periodic peaks (GAN artifacts show at multiples of N/8, N/16)
38
- row_peaks, row_props = find_peaks(row_profile, distance=5, prominence=0.3)
39
- col_peaks, col_props = find_peaks(col_profile, distance=5, prominence=0.3)
40
-
41
- # Check for regular spacing (grid artifact signature)
42
- def check_periodic(peaks, size):
43
- if len(peaks) < 3:
44
- return 0.0, []
45
- spacings = np.diff(sorted(peaks))
46
- # Expected spacing for 8Γ—8 grid: size/8
47
- expected_8 = size / 8
48
- expected_16 = size / 16
49
- matches_8 = np.sum(np.abs(spacings - expected_8) < expected_8 * 0.15)
50
- matches_16 = np.sum(np.abs(spacings - expected_16) < expected_16 * 0.15)
51
- best = max(matches_8, matches_16)
52
- return float(best / max(len(spacings), 1)), spacings.tolist()
53
-
54
- row_periodic, row_spacings = check_periodic(row_peaks, w)
55
- col_periodic, col_spacings = check_periodic(col_peaks, h)
56
- grid_score = (row_periodic + col_periodic) / 2
57
-
58
- # High-frequency vs mid-frequency energy ratio
59
- hf_ring = magnitude.copy()
60
- hf_ring[cy - h // 8:cy + h // 8, cx - w // 8:cx + w // 8] = 0
61
- hf_energy = float(np.mean(hf_ring))
62
- mf_mask = np.zeros_like(magnitude)
63
- mf_mask[cy - h // 4:cy + h // 4, cx - w // 4:cx + w // 4] = 1
64
- mf_mask[cy - h // 8:cy + h // 8, cx - w // 8:cx + w // 8] = 0
65
- mf_energy = float(np.mean(magnitude * mf_mask))
66
-
67
- if grid_score > 0.4:
68
- score = 0.7
69
- note = f"Periodic grid artifacts detected (GAN signature, periodicity={grid_score:.2f})"
70
- elif grid_score > 0.2:
71
- score = 0.3
72
- note = f"Weak periodic patterns (possible GAN artifacts, periodicity={grid_score:.2f})"
73
- else:
74
- score = -0.2
75
- note = "No periodic grid artifacts (natural frequency spectrum)"
76
-
77
- return {
78
- "test": "Frequency Grid Artifacts",
79
- "grid_periodicity_score": round(grid_score, 4),
80
- "row_peaks": len(row_peaks),
81
- "col_peaks": len(col_peaks),
82
- "hf_energy": round(hf_energy, 4),
83
- "mf_energy": round(mf_energy, 4),
84
- "score": score,
85
- "note": note,
86
- "magnitude_spectrum": magnitude,
87
- }
88
-
89
-
90
- # ─── Diffusion Residuals ────────────────────────────────────────────
91
- def analyze_diffusion_residuals(img: Image.Image) -> Dict[str, Any]:
92
- """
93
- Diffusion models leave characteristic spectral notches and
94
- autocorrelation patterns from the denoising step schedule.
95
- """
96
- gray = np.array(img.convert("L")).astype(np.float64)
97
-
98
- # Compute radial power spectrum
99
- fft = np.fft.fft2(gray)
100
- fft_shift = np.fft.fftshift(fft)
101
- power = np.abs(fft_shift) ** 2
102
-
103
- h, w = power.shape
104
- cy, cx = h // 2, w // 2
105
- Y, X = np.mgrid[0:h, 0:w]
106
- R = np.sqrt((X - cx) ** 2 + (Y - cy) ** 2).astype(int)
107
-
108
- # Radially averaged power spectrum
109
- max_r = min(cy, cx)
110
- radial_power = np.zeros(max_r)
111
- counts = np.zeros(max_r)
112
- for r in range(max_r):
113
- mask = R == r
114
- if mask.any():
115
- radial_power[r] = np.mean(power[mask])
116
- counts[r] = np.sum(mask)
117
-
118
- radial_power = np.log(radial_power + 1)
119
-
120
- # Natural images: power ∝ 1/fΒ² β†’ linear decrease in log-log
121
- freqs = np.arange(1, max_r)
122
- log_freqs = np.log(freqs + 1)
123
- log_power = radial_power[1:max_r]
124
-
125
- # Fit linear model (1/fΒ² slope)
126
- if len(log_freqs) > 10:
127
- coeffs = np.polyfit(log_freqs, log_power, 1)
128
- fitted = np.polyval(coeffs, log_freqs)
129
- residuals = log_power - fitted
130
-
131
- # Diffusion models: spectral notches (negative dips in residuals)
132
- notches, _ = find_peaks(-residuals, prominence=0.5)
133
-
134
- # Smoothness of spectral rolloff
135
- smoothness = float(np.std(residuals))
136
- slope = float(coeffs[0])
137
- else:
138
- notches = []
139
- smoothness = 1.0
140
- slope = 0.0
141
-
142
- # Natural 1/fΒ² slope is ~ -2 in log-log
143
- slope_deviation = abs(slope - (-2.0))
144
-
145
- if len(notches) > 3:
146
- score = 0.5
147
- note = f"Spectral notches detected ({len(notches)} notches, diffusion signature)"
148
- elif smoothness > 1.0 or slope_deviation > 1.5:
149
- score = 0.3
150
- note = f"Unnatural spectral rolloff (slope={slope:.2f}, deviation={slope_deviation:.2f})"
151
- elif smoothness < 0.5 and slope_deviation < 0.5:
152
- score = -0.3
153
- note = f"Natural 1/fΒ² spectral profile (slope={slope:.2f})"
154
- else:
155
- score = 0.1
156
- note = f"Mild spectral deviation (slope={slope:.2f})"
157
-
158
- return {
159
- "test": "Diffusion Residuals",
160
- "spectral_notches": len(notches),
161
- "spectral_smoothness": round(smoothness, 4),
162
- "slope": round(slope, 4),
163
- "slope_deviation": round(slope_deviation, 4),
164
- "score": score,
165
- "note": note,
166
- }
167
-
168
-
169
- # ─── Model Fingerprinting ───────────────────────────────────────────
170
- def analyze_model_fingerprint(img: Image.Image) -> Dict[str, Any]:
171
- """
172
- Different generators leave unique frequency-domain signatures.
173
- Analyzes autocorrelation and spectral texture for attribution.
174
- """
175
- gray = np.array(img.convert("L")).astype(np.float64)
176
-
177
- # Autocorrelation map
178
- fft = np.fft.fft2(gray)
179
- power = np.abs(fft) ** 2
180
- autocorr = np.real(np.fft.ifft2(power))
181
- autocorr = np.fft.fftshift(autocorr)
182
- autocorr = autocorr / (autocorr.max() + 1e-9)
183
-
184
- h, w = autocorr.shape
185
- cy, cx = h // 2, w // 2
186
-
187
- # Check for periodic peaks in autocorrelation (GAN checkerboard)
188
- # Exclude center peak
189
- autocorr_masked = autocorr.copy()
190
- r_exclude = max(h, w) // 20
191
- Y, X = np.mgrid[0:h, 0:w]
192
- center_mask = ((X - cx) ** 2 + (Y - cy) ** 2) < r_exclude ** 2
193
- autocorr_masked[center_mask] = 0
194
-
195
- max_secondary = float(np.max(autocorr_masked))
196
-
197
- # High secondary peak = repetitive structure (GAN artifact)
198
- if max_secondary > 0.3:
199
- score = 0.6
200
- note = f"Strong autocorrelation secondary peak ({max_secondary:.3f}) β€” GAN checkerboard pattern"
201
- elif max_secondary > 0.15:
202
- score = 0.3
203
- note = f"Moderate autocorrelation peak ({max_secondary:.3f}) β€” possible generator artifacts"
204
- else:
205
- score = -0.2
206
- note = f"Natural autocorrelation structure (peak={max_secondary:.3f})"
207
-
208
- return {
209
- "test": "Model Fingerprinting",
210
- "max_secondary_peak": round(max_secondary, 4),
211
- "score": score,
212
- "note": note,
213
- }
214
-
215
-
216
- # ─── GAN Upsampling Checkerboard ─────────────────────────────────────
217
- def analyze_upsampling_checkerboard(img: Image.Image) -> Dict[str, Any]:
218
- """
219
- Transposed convolutions in GANs create checkerboard patterns.
220
- Detected via pixel-level autocorrelation at lag=2.
221
- """
222
- gray = np.array(img.convert("L")).astype(np.float64)
223
- h, w = gray.shape
224
-
225
- # Compute autocorrelation at lag 2 (checkerboard period)
226
- if h < 10 or w < 10:
227
- return {"test": "Upsampling Checkerboard", "score": 0.0, "note": "Image too small"}
228
-
229
- # Horizontal lag-2 autocorrelation
230
- h_auto = float(np.corrcoef(gray[:, :-2].ravel(), gray[:, 2:].ravel())[0, 1])
231
- # Vertical lag-2
232
- v_auto = float(np.corrcoef(gray[:-2, :].ravel(), gray[2:, :].ravel())[0, 1])
233
-
234
- # Compare lag-1 vs lag-2 (checkerboard: lag-2 > lag-1)
235
- h_auto1 = float(np.corrcoef(gray[:, :-1].ravel(), gray[:, 1:].ravel())[0, 1])
236
- v_auto1 = float(np.corrcoef(gray[:-1, :].ravel(), gray[1:, :].ravel())[0, 1])
237
-
238
- # Checkerboard signature: lag-2 corr noticeably higher than lag-1
239
- h_checker = h_auto - h_auto1
240
- v_checker = v_auto - v_auto1
241
- checker_score = (h_checker + v_checker) / 2
242
-
243
- if checker_score > 0.02:
244
- score = 0.5
245
- note = f"Checkerboard pattern detected (Ξ”corr={checker_score:.4f}, GAN upsampling artifact)"
246
- elif checker_score > 0.005:
247
- score = 0.2
248
- note = f"Mild checkerboard tendency (Ξ”corr={checker_score:.4f})"
249
- else:
250
- score = -0.1
251
- note = f"No checkerboard pattern (Ξ”corr={checker_score:.4f})"
252
-
253
- return {
254
- "test": "Upsampling Checkerboard",
255
- "checker_delta": round(checker_score, 6),
256
- "h_lag2": round(h_auto, 4),
257
- "v_lag2": round(v_auto, 4),
258
- "score": score,
259
- "note": note,
260
- }
261
-
262
-
263
- # ─── VAE Boundary Artifacts ─────────────────────────────────────────
264
- def analyze_vae_boundaries(img: Image.Image) -> Dict[str, Any]:
265
- """
266
- VAE-based generators (Stable Diffusion) process in latent patches.
267
- This can create subtle boundary artifacts at 64x64 or 32x32 grid.
268
- """
269
- gray = np.array(img.convert("L")).astype(np.float64)
270
- h, w = gray.shape
271
-
272
- # Check for grid artifacts at common VAE patch sizes
273
- best_score = 0.0
274
- best_size = 0
275
- best_ratio = 1.0
276
-
277
- for patch_size in [32, 64, 128]:
278
- if h < patch_size * 2 or w < patch_size * 2:
279
- continue
280
-
281
- h_crop = (h // patch_size) * patch_size
282
- w_crop = (w // patch_size) * patch_size
283
- g = gray[:h_crop, :w_crop]
284
-
285
- # Measure intensity discontinuity at patch boundaries
286
- boundary_diffs = []
287
- interior_diffs = []
288
-
289
- for i in range(1, h_crop):
290
- row_diff = np.abs(g[i, :] - g[i - 1, :])
291
- if i % patch_size == 0:
292
- boundary_diffs.append(float(np.mean(row_diff)))
293
- elif i % patch_size != 1:
294
- interior_diffs.append(float(np.mean(row_diff)))
295
-
296
- if boundary_diffs and interior_diffs:
297
- ratio = float(np.mean(boundary_diffs)) / (float(np.mean(interior_diffs)) + 1e-9)
298
- if ratio > best_ratio:
299
- best_ratio = ratio
300
- best_size = patch_size
301
-
302
- if best_ratio > 1.3:
303
- score = 0.4
304
- note = f"VAE boundary artifacts at {best_size}px grid (ratio={best_ratio:.3f})"
305
- elif best_ratio > 1.1:
306
- score = 0.2
307
- note = f"Weak boundary artifacts at {best_size}px (ratio={best_ratio:.3f})"
308
- else:
309
- score = -0.1
310
- note = f"No VAE boundary artifacts (max ratio={best_ratio:.3f})"
311
-
312
- return {
313
- "test": "VAE Boundary Artifacts",
314
- "best_boundary_ratio": round(best_ratio, 4),
315
- "best_patch_size": best_size,
316
- "score": score,
317
- "note": note,
318
- }
319
-
320
-
321
- # ─── Main Agent Entry Point ─────────────────────────────────────────
322
- def run_model_agent(img: Image.Image) -> AgentEvidence:
323
- """Run all generative model detection tests."""
324
- findings = []
325
- scores = []
326
-
327
- for fn in [analyze_frequency_grid, analyze_diffusion_residuals, analyze_model_fingerprint,
328
- analyze_upsampling_checkerboard, analyze_vae_boundaries]:
329
- try:
330
- result = fn(img)
331
- findings.append(result)
332
- scores.append(result["score"])
333
- except Exception as e:
334
- findings.append({"test": fn.__name__, "error": str(e), "score": 0})
335
-
336
- avg_score = float(np.mean(scores)) if scores else 0.0
337
- confidence = min(1.0, 0.5 + 0.5 * abs(avg_score))
338
-
339
- violations = [f["test"] for f in findings if f.get("score", 0) > 0.2]
340
- compliant = [f["test"] for f in findings if f.get("score", 0) < -0.1]
341
-
342
- if violations:
343
- rationale = f"Generative model signatures detected: {', '.join(violations)}."
344
- elif compliant:
345
- rationale = f"No generator artifacts found: {', '.join(compliant)}."
346
- else:
347
- rationale = "Generator analysis inconclusive."
348
-
349
  for f in findings:
350
- if f.get("note"):
351
- rationale += f" [{f['test']}]: {f['note']}."
352
-
353
- return AgentEvidence(
354
- agent_name="Generative Model Agent",
355
- violation_score=np.clip(avg_score, -1, 1),
356
- confidence=confidence,
357
- failure_prob=max(0.0, 1.0 - len(scores) / 5),
358
- rationale=rationale,
359
- sub_findings=findings,
360
- )
 
1
+ """FORENSIQ β€” Generative Model Agent (15 features)"""
 
 
 
 
 
 
 
2
  import numpy as np
3
  from PIL import Image
4
  from scipy.signal import find_peaks
5
+ from scipy.ndimage import gaussian_filter, label
6
  from typing import Dict, Any
 
7
  from agents.optical_agent import AgentEvidence
8
 
9
def _g(img):
    """Return *img* as a float64 grayscale ("L" mode) numpy array."""
    luma = img.convert("L")
    return np.array(luma).astype(np.float64)
10
+
11
def m01_fft_grid_8x8(img):
    """Score periodic peaks along the FFT centre axes (8/16-cell grids).

    GAN deconvolution upsampling leaves evenly spaced spectral peaks; the
    fraction of peak spacings matching an 8- or 16-cell grid becomes the
    periodicity score. The log-magnitude spectrum is returned for reuse.
    """
    pix = _g(img)
    height, width = pix.shape
    log_mag = np.log(np.abs(np.fft.fftshift(np.fft.fft2(pix))) + 1)
    mid_r, mid_c = height // 2, width // 2
    row_pk, _ = find_peaks(log_mag[mid_r, :], distance=5, prominence=0.3)
    col_pk, _ = find_peaks(log_mag[:, mid_c], distance=5, prominence=0.3)

    def periodicity(peaks, extent):
        # Fraction of peak gaps within 15% of an 8- or 16-cell spacing.
        if len(peaks) < 3:
            return 0.0
        gaps = np.diff(sorted(peaks))
        hit8 = np.sum(np.abs(gaps - extent / 8) < (extent / 8) * 0.15)
        hit16 = np.sum(np.abs(gaps - extent / 16) < (extent / 16) * 0.15)
        return float(max(hit8, hit16) / max(len(gaps), 1))

    gs = (periodicity(row_pk, width) + periodicity(col_pk, height)) / 2
    if gs > 0.4:
        score, note = 0.7, f"8Γ—8 grid artifacts (periodicity={gs:.2f})"
    elif gs > 0.2:
        score, note = 0.3, f"Weak grid patterns ({gs:.2f})"
    else:
        score, note = -0.2, "No grid artifacts"
    return {"test": "FFT Grid 8Γ—8", "periodicity": round(gs, 4),
            "score": score, "note": note, "magnitude_spectrum": log_mag}
25
+
26
def m02_fft_grid_16x16(img):
    """Probe the FFT magnitude at harmonics of a 16Γ—16 spatial grid.

    Samples the centered log-magnitude spectrum at multiples of h/16 and
    w/16 along the central axes and compares the mean sampled value to the
    spectrum's median background. Strong peaks at these harmonics are a
    GAN-upsampling signature.

    Returns a dict with the peak/background ratio, a score in [-0.1, 0.5],
    and a human-readable note.
    """
    gray = np.array(img.convert("L")).astype(np.float64)
    h, w = gray.shape
    # Fix: with h or w below 16 the harmonic step (h//16 or w//16) is 0,
    # so every "peak" sample would re-read the DC bin and produce a
    # spurious detection. Bail out neutrally instead.
    if h < 16 or w < 16:
        return {"test": "FFT Grid 16Γ—16", "peak_ratio": 0.0, "score": 0.0,
                "note": "Image too small for 16Γ—16 analysis"}
    mag = np.log(np.abs(np.fft.fftshift(np.fft.fft2(gray))) + 1)
    cy, cx = h // 2, w // 2
    step_h, step_w = h // 16, w // 16
    # Sample up to 7 harmonics along each central axis (0 when out of range).
    peaks_h = [mag[cy, cx + k * step_w] if cx + k * step_w < w else 0 for k in range(1, 8)]
    peaks_v = [mag[cy + k * step_h, cx] if cy + k * step_h < h else 0 for k in range(1, 8)]
    avg_peak = (float(np.mean(peaks_h)) + float(np.mean(peaks_v))) / 2
    bg = float(np.median(mag))  # typical spectral level as baseline
    ratio = avg_peak / (bg + 1e-9)
    if ratio > 1.5:
        s, n = 0.5, f"16Γ—16 spectral peaks (ratio={ratio:.2f})"
    elif ratio > 1.2:
        s, n = 0.2, f"Mild 16Γ—16 peaks ({ratio:.2f})"
    else:
        s, n = -0.1, f"No 16Γ—16 artifacts ({ratio:.2f})"
    return {"test": "FFT Grid 16Γ—16", "peak_ratio": round(ratio, 3), "score": s, "note": n}
40
+
41
def m03_spectral_slope(img):
    """Fit the radially-averaged power spectrum's log-log slope.

    Natural photographs follow an approximate 1/fΒ² power law (slope β‰ˆ -2
    in log-log coordinates); large deviations suggest synthetic content.
    Returns a dict with the fitted slope, a score in [-0.3, 0.3], and a note.
    """
    gray = np.array(img.convert("L")).astype(np.float64)
    h, w = gray.shape
    power = np.abs(np.fft.fftshift(np.fft.fft2(gray))) ** 2
    cy, cx = h // 2, w // 2
    Y, X = np.mgrid[0:h, 0:w]
    R = np.sqrt((X - cx) ** 2 + (Y - cy) ** 2).astype(int)
    maxr = min(cy, cx)
    # Vectorized radial average: bincount replaces the per-radius boolean
    # masks of the previous implementation (O(h*w) instead of O(maxr*h*w)).
    flat_r = R.ravel()
    keep = flat_r < maxr
    sums = np.bincount(flat_r[keep], weights=power.ravel()[keep], minlength=maxr)
    counts = np.bincount(flat_r[keep], minlength=maxr)
    rp = np.zeros(maxr)
    nz = counts > 0
    rp[nz] = sums[nz] / counts[nz]
    rp = np.log(rp + 1)
    lf = np.log(np.arange(1, maxr) + 1)
    lp = rp[1:maxr]
    if len(lf) <= 10:
        # Fix: too few radial bins to fit a slope — report a neutral result
        # instead of the previous hard-coded dev=2 ("unnatural") verdict,
        # which flagged every tiny image as suspicious.
        return {"test": "Spectral Slope 1/fΒ²", "slope": 0.0, "score": 0.0,
                "note": "Image too small for slope fit"}
    slope = float(np.polyfit(lf, lp, 1)[0])
    dev = abs(slope - (-2.0))
    if dev < 0.5:
        s, n = -0.3, f"Natural 1/fΒ² slope ({slope:.2f})"
    elif dev > 1.5:
        s, n = 0.3, f"Unnatural spectral slope ({slope:.2f})"
    else:
        s, n = 0.1, f"Slope deviation={dev:.2f}"
    return {"test": "Spectral Slope 1/fΒ²", "slope": round(slope, 3), "score": s, "note": n}
56
+
57
def m04_diffusion_notches(img):
    """Count dips ("notches") in the radial power spectrum.

    The radially-averaged log power is detrended with a log-log linear
    fit; prominent negative residual dips are characteristic of
    diffusion-model denoising schedules.
    """
    pix = _g(img)
    power = np.abs(np.fft.fftshift(np.fft.fft2(pix))) ** 2
    rows, cols = power.shape
    mid_r, mid_c = rows // 2, cols // 2
    yy, xx = np.mgrid[0:rows, 0:cols]
    radius = np.sqrt((xx - mid_c) ** 2 + (yy - mid_r) ** 2).astype(int)
    limit = min(mid_r, mid_c)
    profile = np.zeros(limit)
    for ring in range(limit):
        members = radius == ring
        if members.any():
            profile[ring] = float(np.mean(power[members]))
    profile = np.log(profile + 1)
    notch_count = 0
    if len(profile) > 20:
        log_f = np.log(np.arange(1, limit) + 1)
        trend = np.polyval(np.polyfit(log_f, profile[1:limit], 1), log_f)
        residual = profile[1:limit] - trend
        dips, _ = find_peaks(-residual, prominence=0.5)
        notch_count = len(dips)
    if notch_count > 3:
        score, note = 0.5, f"{notch_count} spectral notches β€” diffusion signature"
    elif notch_count > 1:
        score, note = 0.2, f"{notch_count} notches"
    else:
        score, note = -0.1, "No diffusion notches"
    return {"test": "Diffusion Notches", "count": notch_count, "score": score, "note": note}
73
+
74
def m05_autocorrelation(img):
    """Look for secondary peaks in the image autocorrelation.

    The autocorrelation is obtained via the inverse FFT of the power
    spectrum. After zeroing the trivial central peak, any strong
    remaining peak indicates repetitive structure such as a GAN
    checkerboard.
    """
    pix = _g(img)
    power = np.abs(np.fft.fft2(pix)) ** 2
    corr = np.fft.fftshift(np.real(np.fft.ifft2(power)))
    corr = corr / (corr.max() + 1e-9)
    rows, cols = corr.shape
    mid_r, mid_c = rows // 2, cols // 2
    yy, xx = np.mgrid[0:rows, 0:cols]
    exclusion = max(rows, cols) // 20  # radius of the masked central lobe
    masked = corr.copy()
    masked[((xx - mid_c) ** 2 + (yy - mid_r) ** 2) < exclusion ** 2] = 0
    peak = float(np.max(masked))
    if peak > 0.3:
        score, note = 0.6, f"Strong secondary peak ({peak:.3f}) β€” GAN checkerboard"
    elif peak > 0.15:
        score, note = 0.3, f"Moderate peak ({peak:.3f})"
    else:
        score, note = -0.2, f"Natural autocorrelation ({peak:.3f})"
    return {"test": "Autocorrelation Peak", "max_secondary": round(peak, 4), "score": score, "note": note}
84
+
85
def m06_checkerboard(img):
    """Detect transposed-convolution checkerboard artifacts.

    A checkerboard makes pixel correlation at lag 2 noticeably higher
    than at lag 1. Correlations are computed on subsampled pixel pairs
    and the lag-2 minus lag-1 delta (averaged over both axes) is scored.
    """
    gray = np.array(img.convert("L")).astype(np.float64)
    h, w = gray.shape
    if h < 10 or w < 10:
        return {"test": "Checkerboard Pattern", "score": 0.0, "note": "Too small"}

    def _corr(a, b):
        # Correlation on a subsample. The stride adapts to image size so
        # small images are not reduced to <2 samples (the old fixed
        # [::100] stride produced NaN correlations there). Returns 0.0
        # when a constant signal makes the correlation undefined.
        fa, fb = a.ravel(), b.ravel()
        step = max(1, fa.size // 10000)
        fa, fb = fa[::step], fb[::step]
        if fa.size < 2 or np.std(fa) == 0 or np.std(fb) == 0:
            return 0.0
        return float(np.corrcoef(fa, fb)[0, 1])

    ha = _corr(gray[:, :-2], gray[:, 2:])   # horizontal lag-2
    va = _corr(gray[:-2, :], gray[2:, :])   # vertical lag-2
    h1 = _corr(gray[:, :-1], gray[:, 1:])   # horizontal lag-1
    v1 = _corr(gray[:-1, :], gray[1:, :])   # vertical lag-1
    delta = ((ha - h1) + (va - v1)) / 2
    if delta > 0.02:
        s, n = 0.5, f"Checkerboard detected (Ξ”={delta:.4f})"
    elif delta > 0.005:
        s, n = 0.2, f"Mild checkerboard ({delta:.4f})"
    else:
        s, n = -0.1, f"No checkerboard ({delta:.4f})"
    return {"test": "Checkerboard Pattern", "delta": round(delta, 6), "score": s, "note": n}
97
+
98
def m07_vae_boundaries(img):
    """Check for intensity discontinuities on a latent-patch grid.

    VAE-based generators can leave faint seams every 32/64/128 pixels.
    For each candidate grid size, the mean row-to-row difference at grid
    boundaries is compared with the mean difference inside patches; the
    largest boundary/interior ratio wins.
    """
    pix = _g(img)
    rows, cols = pix.shape
    top_ratio, top_size = 1.0, 0
    for grid in (32, 64, 128):
        if rows < grid * 2 or cols < grid * 2:
            continue
        crop_r, crop_c = (rows // grid) * grid, (cols // grid) * grid
        region = pix[:crop_r, :crop_c]
        seam, inner = [], []
        for row in range(1, crop_r):
            step = np.abs(region[row, :] - region[row - 1, :])
            if row % grid == 0:
                seam.append(float(np.mean(step)))
            elif row % grid != 1:  # skip the row right after a seam
                inner.append(float(np.mean(step)))
        if seam and inner:
            ratio = float(np.mean(seam)) / (float(np.mean(inner)) + 1e-9)
            if ratio > top_ratio:
                top_ratio, top_size = ratio, grid
    if top_ratio > 1.3:
        score, note = 0.4, f"VAE boundaries at {top_size}px ({top_ratio:.3f})"
    elif top_ratio > 1.1:
        score, note = 0.2, f"Weak boundaries ({top_ratio:.3f})"
    else:
        score, note = -0.1, f"No VAE boundaries ({top_ratio:.3f})"
    return {"test": "VAE Patch Boundaries", "ratio": round(top_ratio, 4), "score": score, "note": note}
115
+
116
def m08_spectral_symmetry(img):
    """Measure mirror symmetry of the log-magnitude spectrum.

    Compares the top half against the flipped bottom half and the left
    half against the flipped right half; low asymmetry is mildly
    camera-like, high asymmetry mildly suspicious.
    """
    pix = _g(img)
    log_mag = np.log(np.abs(np.fft.fftshift(np.fft.fft2(pix))) + 1)
    rows, cols = log_mag.shape
    mid_r, mid_c = rows // 2, cols // 2
    upper, lower = log_mag[:mid_r, :], np.flipud(log_mag[mid_r + 1:, :])
    left_half, right_half = log_mag[:, :mid_c], np.fliplr(log_mag[:, mid_c + 1:])

    def _mismatch(a, b):
        # Mean absolute difference over the common overlap of two halves.
        r, c = min(a.shape[0], b.shape[0]), min(a.shape[1], b.shape[1])
        return float(np.mean(np.abs(a[:r, :c] - b[:r, :c])))

    asym = (_mismatch(upper, lower) + _mismatch(left_half, right_half)) / 2
    if asym < 0.1:
        score, note = -0.1, f"Symmetric spectrum ({asym:.3f})"
    elif asym > 0.5:
        score, note = 0.3, f"Asymmetric spectrum ({asym:.3f})"
    else:
        score, note = 0.0, f"Spectral asymmetry={asym:.3f}"
    return {"test": "Spectral Symmetry", "asymmetry": round(asym, 4), "score": score, "note": note}
130
+
131
def m09_upsampling_stride(img):
    """Compare even-lattice vs odd-lattice pixels for stride-2 artifacts.

    Stride-2 upsampling makes the (even,even) and (odd,odd) sub-lattices
    nearly identical, while natural sensor noise keeps them apart. The
    mean absolute difference is normalised by overall brightness.
    """
    pix = _g(img)
    lattice_a = pix[::2, ::2]
    lattice_b = pix[1::2, 1::2]
    rows = min(lattice_a.shape[0], lattice_b.shape[0])
    cols = min(lattice_a.shape[1], lattice_b.shape[1])
    gap = float(np.mean(np.abs(lattice_a[:rows, :cols] - lattice_b[:rows, :cols])))
    rel = gap / (float(np.mean(pix)) + 1e-9)
    if rel > 0.1:
        score, note = -0.1, f"Natural stride variation ({rel:.4f})"
    elif rel < 0.01:
        score, note = 0.3, f"Stride-2 artifacts ({rel:.4f})"
    else:
        score, note = 0.0, f"Stride diff={rel:.4f}"
    return {"test": "Upsampling Stride-2", "norm_diff": round(rel, 4), "score": score, "note": note}
143
+
144
def m10_patch_diversity(img):
    """Score the spread of per-patch contrast (std) across 32Γ—32 tiles.

    GAN mode collapse yields many near-identical-texture patches, i.e. a
    low coefficient of variation of the per-patch standard deviations.
    Returns a dict with the diversity value, a score in [-0.2, 0.3],
    and a note.
    """
    gray = np.array(img.convert("L")).astype(np.float64)
    h, w = gray.shape
    ps = 32
    hc, wc = (h // ps) * ps, (w // ps) * ps
    n_patches = (hc // ps) * (wc // ps)
    if n_patches < 4:
        return {"test": "Patch Diversity", "score": 0.0, "note": "Too few patches"}
    # Vectorized tiling replaces the nested Python loop; the per-patch
    # means of the previous version were dead code and are no longer
    # computed.
    tiles = gray[:hc, :wc].reshape(hc // ps, ps, wc // ps, ps).swapaxes(1, 2).reshape(n_patches, ps * ps)
    stds = np.std(tiles, axis=1)
    diversity = float(np.std(stds) / (np.mean(stds) + 1e-9))
    if diversity > 0.5:
        s, n = -0.2, f"High patch diversity ({diversity:.3f}) β€” natural"
    elif diversity < 0.15:
        s, n = 0.3, f"Low patch diversity ({diversity:.3f}) β€” GAN mode collapse"
    else:
        s, n = 0.0, f"Patch diversity={diversity:.3f}"
    return {"test": "Patch Diversity", "diversity": round(diversity, 4), "score": s, "note": n}
158
+
159
def m11_color_consistency(img):
    """Check whether the R/G balance is suspiciously uniform across tiles.

    For each 64Γ—64 tile with meaningful green signal, the red/green mean
    ratio is collected; a near-zero coefficient of variation across tiles
    suggests the flat colour statistics of generated imagery.
    """
    colour = np.array(img.convert("RGB")).astype(np.float64)
    rows, cols = colour.shape[0], colour.shape[1]
    tile = 64
    crop_r, crop_c = (rows // tile) * tile, (cols // tile) * tile
    colour = colour[:crop_r, :crop_c]
    rg_ratios = []
    for top in range(0, crop_r, tile):
        for left in range(0, crop_c, tile):
            means = np.mean(colour[top:top + tile, left:left + tile], axis=(0, 1))
            if means[1] > 10:  # skip tiles with almost no green signal
                rg_ratios.append(means[0] / (means[1] + 1e-9))
    if len(rg_ratios) < 4:
        return {"test": "Color Ratio Consistency", "score": 0.0, "note": "Few patches"}
    cv = float(np.std(rg_ratios)) / (float(np.mean(rg_ratios)) + 1e-9)
    if cv > 0.1:
        score, note = -0.2, f"Varied color ratios (CV={cv:.3f})"
    elif cv < 0.02:
        score, note = 0.2, f"Suspiciously uniform color ({cv:.3f})"
    else:
        score, note = 0.0, f"Color CV={cv:.3f}"
    return {"test": "Color Ratio Consistency", "cv": round(cv, 4), "score": score, "note": note}
173
+
174
def m12_spectral_rolloff_shape(img):
    """Compare spectral rolloff along the two diagonals (anisotropy).

    Camera optics and scene structure make real spectra direction
    dependent; a perfectly isotropic rolloff is weak evidence of
    synthesis. Returns a dict with the anisotropy, a score in
    [-0.1, 0.2], and a note.
    """
    gray = np.array(img.convert("L")).astype(np.float64)
    mag = np.abs(np.fft.fftshift(np.fft.fft2(gray)))
    h, w = mag.shape
    cy, cx = h // 2, w // 2
    length = min(cy, cx) // 2
    if length <= 5:
        # Fix: too few diagonal samples — previously aniso defaulted to 0
        # and tiny images were mislabelled "Isotropic ... AI-like" (+0.2).
        return {"test": "Spectral Rolloff Shape", "anisotropy": 0.0,
                "score": 0.0, "note": "Image too small for rolloff analysis"}
    idx = np.arange(length)
    d1 = np.log(mag[cy + idx, cx + idx] + 1)   # main diagonal
    d2 = np.log(mag[cy + idx, cx - idx] + 1)   # anti-diagonal
    aniso = float(np.mean(np.abs(d1 - d2))) / (float(np.mean(d1)) + 1e-9)
    if aniso > 0.1:
        s, n = -0.1, f"Anisotropic rolloff ({aniso:.3f})"
    elif aniso < 0.02:
        s, n = 0.2, f"Isotropic rolloff ({aniso:.3f}) β€” AI-like"
    else:
        s, n = 0.0, f"Rolloff anisotropy={aniso:.3f}"
    return {"test": "Spectral Rolloff Shape", "anisotropy": round(aniso, 4), "score": s, "note": n}
187
+
188
def m13_texture_repetition(img):
    """Search for near-duplicate 64Γ—64 texture tiles.

    Each tile is contrast-normalised and the maximum correlation between
    non-adjacent tiles (first 20 tiles only, to bound cost) is taken as
    evidence of copied GAN textures.
    """
    pix = _g(img)
    rows, cols = pix.shape
    tile = 64
    if rows < tile * 3 or cols < tile * 3:
        return {"test": "Texture Repetition", "score": 0.0, "note": "Too small"}
    crop_r, crop_c = (rows // tile) * tile, (cols // tile) * tile
    pix = pix[:crop_r, :crop_c]
    tiles = []
    for top in range(0, crop_r, tile):
        for left in range(0, crop_c, tile):
            block = pix[top:top + tile, left:left + tile]
            tiles.append(((block - np.mean(block)) / (np.std(block) + 1e-9)).ravel())
    if len(tiles) < 4:
        return {"test": "Texture Repetition", "score": 0.0, "note": "Few patches"}
    tiles = np.array(tiles)
    cap = min(len(tiles), 20)
    best = 0
    for a in range(cap):
        for b in range(a + 2, cap):
            similarity = float(np.corrcoef(tiles[a], tiles[b])[0, 1])
            if similarity > best:
                best = similarity
    if best > 0.8:
        score, note = 0.4, f"Repeated textures ({best:.3f}) β€” GAN copy"
    elif best > 0.5:
        score, note = 0.2, f"Similar textures ({best:.3f})"
    else:
        score, note = -0.1, f"Varied textures ({best:.3f})"
    return {"test": "Texture Repetition", "max_corr": round(best, 4), "score": score, "note": note}
209
+
210
def m14_highfreq_noise_structure(img):
    """Check whether residual noise lives at high or low frequencies.

    The residual after a sigma=1.0 Gaussian blur is transformed to the
    frequency domain; sensor noise is high-frequency dominant, while AI
    smoothing pushes the residual energy toward low frequencies.
    """
    pix = _g(img)
    residual = pix - gaussian_filter(pix, 1.0)
    spec = np.abs(np.fft.fftshift(np.fft.fft2(residual)))
    rows, cols = spec.shape
    mid_r, mid_c = rows // 2, cols // 2
    yy, xx = np.mgrid[0:rows, 0:cols]
    dist = np.sqrt((xx - mid_c) ** 2 + (yy - mid_r) ** 2)
    limit = min(mid_r, mid_c)
    high_band = spec[dist > limit * 0.5]
    low_band = spec[dist < limit * 0.3]
    ratio = float(np.mean(high_band)) / (float(np.mean(low_band)) + 1e-9)
    if ratio > 2:
        score, note = -0.2, f"HF-dominant noise ({ratio:.2f}) β€” sensor"
    elif ratio < 0.5:
        score, note = 0.3, f"LF-dominant noise ({ratio:.2f}) β€” AI smoothing"
    else:
        score, note = 0.0, f"Noise HF/LF={ratio:.2f}"
    return {"test": "HF Noise Structure", "ratio": round(ratio, 3), "score": score, "note": note}
221
+
222
def m15_phase_coherence(img):
    """Measure smoothness of the FFT phase field.

    Computes the mean absolute phase step (wrap-corrected into [0, pi])
    between adjacent frequency bins, summed over both axes. Lower values
    mean a more coherent phase structure.
    """
    pix = _g(img)
    phase = np.angle(np.fft.fft2(pix))

    def _mean_step(axis):
        # Mean wrap-corrected phase difference along one axis.
        step = np.abs(np.diff(phase, axis=axis))
        wrapped = step > np.pi
        step[wrapped] = 2 * np.pi - step[wrapped]
        return np.mean(step)

    rough = float(_mean_step(1) + _mean_step(0))
    if rough < 2:
        score, note = -0.2, f"Coherent phase ({rough:.3f})"
    elif rough > 2.5:
        score, note = 0.2, f"Incoherent phase ({rough:.3f})"
    else:
        score, note = 0.0, f"Phase coherence={rough:.3f}"
    return {"test": "Phase Coherence", "smoothness": round(rough, 4), "score": score, "note": note}
235
+
236
# Registry of the 15 detector functions, run in order by run_model_agent;
# each takes a PIL-style image and returns a dict with at least
# "test", "score" and "note" keys.
ALL_TESTS=[m01_fft_grid_8x8,m02_fft_grid_16x16,m03_spectral_slope,m04_diffusion_notches,
           m05_autocorrelation,m06_checkerboard,m07_vae_boundaries,m08_spectral_symmetry,
           m09_upsampling_stride,m10_patch_diversity,m11_color_consistency,m12_spectral_rolloff_shape,
           m13_texture_repetition,m14_highfreq_noise_structure,m15_phase_coherence]
240
+
241
def run_model_agent(img):
    """Run every generative-model detector and fold results into evidence.

    Each detector in ALL_TESTS runs independently; one that raises
    contributes an error finding with score 0 and is excluded from the
    average. The returned AgentEvidence carries the mean score (clipped
    to [-1, 1]), a confidence derived from its magnitude, the fraction of
    detectors that failed, and a rationale listing every detector note.
    """
    findings = []
    scores = []
    for detector in ALL_TESTS:
        try:
            outcome = detector(img)
        except Exception as err:  # isolate individual detector failures
            findings.append({"test": detector.__name__, "error": str(err), "score": 0})
            continue
        findings.append(outcome)
        scores.append(outcome["score"])
    avg = float(np.mean(scores)) if scores else 0.0
    conf = min(1.0, 0.5 + 0.5 * abs(avg))
    flagged = [f["test"] for f in findings if f.get("score", 0) > 0.2]
    clean = [f["test"] for f in findings if f.get("score", 0) < -0.1]
    if flagged:
        rationale = f"Model signatures: {', '.join(flagged)}."
    elif clean:
        rationale = f"No artifacts: {', '.join(clean)}."
    else:
        rationale = "Model analysis inconclusive."
    for finding in findings:
        note = finding.get("note")
        if note:
            rationale += f" [{finding['test']}]: {note}."
    return AgentEvidence("Generative Model Agent", np.clip(avg, -1, 1), conf,
                         max(0, 1 - len(scores) / len(ALL_TESTS)), rationale, findings)