anky2002 committed on
Commit
37025de
·
verified ·
1 Parent(s): bd3a66b

Upload agents/statistical_agent.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. agents/statistical_agent.py +144 -2
agents/statistical_agent.py CHANGED
@@ -10,6 +10,7 @@ import numpy as np
10
  from PIL import Image
11
  from scipy.fftpack import dct
12
  from scipy.stats import kurtosis as scipy_kurtosis, entropy
 
13
  from typing import Dict, Any
14
 
15
  from agents.optical_agent import AgentEvidence
@@ -179,13 +180,154 @@ def analyze_gradient_sparsity(img: Image.Image) -> Dict[str, Any]:
179
  }
180
 
181
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
182
  # ─── Main Agent Entry Point ─────────────────────────────────────────
183
  def run_statistical_agent(img: Image.Image) -> AgentEvidence:
184
  """Run all statistical priors tests."""
185
  findings = []
186
  scores = []
187
 
188
- for fn in [analyze_dct_distribution, analyze_benford, analyze_gradient_sparsity]:
 
189
  try:
190
  result = fn(img)
191
  findings.append(result)
@@ -214,7 +356,7 @@ def run_statistical_agent(img: Image.Image) -> AgentEvidence:
214
  agent_name="Statistical Priors Agent",
215
  violation_score=np.clip(avg_score, -1, 1),
216
  confidence=confidence,
217
- failure_prob=max(0.0, 1.0 - len(scores) / 3),
218
  rationale=rationale,
219
  sub_findings=findings,
220
  )
 
10
  from PIL import Image
11
  from scipy.fftpack import dct
12
  from scipy.stats import kurtosis as scipy_kurtosis, entropy
13
+ from scipy.ndimage import gaussian_filter
14
  from typing import Dict, Any
15
 
16
  from agents.optical_agent import AgentEvidence
 
180
  }
181
 
182
 
183
# ─── Local Kurtosis Map ──────────────────────────────────────────────
def analyze_local_kurtosis(img: "Image.Image") -> Dict[str, Any]:
    """
    Measure the spatial variation of per-tile excess kurtosis on luminance.

    Natural images have spatially varying kurtosis (textured vs smooth
    regions), while AI images often show unnaturally uniform local
    statistics. Returns a dict with the test name, summary statistics,
    a score (negative = natural-leaning, positive = AI-leaning), and a
    human-readable note.
    """
    gray = np.array(img.convert("L")).astype(np.float64)
    rows, cols = gray.shape
    tile = 32
    # Trim so the image splits into a whole number of 32x32 tiles.
    gray = gray[: (rows // tile) * tile, : (cols // tile) * tile]

    # Excess (Fisher) kurtosis per tile; near-flat tiles (std <= 1) are
    # skipped because kurtosis is not meaningful there.
    blocks = (
        gray[r : r + tile, c : c + tile].ravel()
        for r in range(0, gray.shape[0], tile)
        for c in range(0, gray.shape[1], tile)
    )
    local_kurts = [
        float(scipy_kurtosis(b, fisher=True)) for b in blocks if np.std(b) > 1
    ]

    # Too few usable tiles -> no reliable spatial statistic.
    if len(local_kurts) < 10:
        return {"test": "Local Kurtosis Map", "score": 0.0, "note": "Insufficient blocks"}

    kurt_arr = np.asarray(local_kurts)
    kurt_std = float(np.std(kurt_arr))
    kurt_mean = float(np.mean(kurt_arr))

    # Wide spread of local kurtosis is typical of real photographs;
    # very low spread suggests synthetically uniform texture statistics.
    if kurt_std > 3.0:
        score = -0.3
        note = f"High local kurtosis variation (Οƒ={kurt_std:.2f}, natural spatial statistics)"
    elif kurt_std < 1.0:
        score = 0.4
        note = f"Unnaturally uniform local statistics (Οƒ={kurt_std:.2f}, AI-like)"
    else:
        score = 0.0
        note = f"Moderate local kurtosis variation (Οƒ={kurt_std:.2f})"

    return {
        "test": "Local Kurtosis Map",
        "kurtosis_std": round(kurt_std, 4),
        "kurtosis_mean": round(kurt_mean, 4),
        "score": score,
        "note": note,
    }
227
+
228
+
229
# ─── Color Histogram Analysis ───────────────────────────────────────
def analyze_color_histogram(img: "Image.Image") -> Dict[str, Any]:
    """
    Check per-channel color histograms for irregularity.

    Natural images have smooth, continuous color histograms, while
    AI/GAN-processed images can show comb-like gaps or unusual peaks.
    The irregularity measure is the mean absolute deviation of each
    channel histogram from its Gaussian-smoothed version, normalized by
    the mean bin height; the three channel values are averaged.

    Returns a dict with the test name, the smoothness measure, a score
    (negative = natural-leaning, positive = manipulation-leaning), and a
    human-readable note.
    """
    rgb = np.array(img.convert("RGB"))

    # NOTE(review): earlier draft also computed zero-bin counts and
    # zero/non-zero transition counts but never used them; that dead
    # code has been removed. Only the smoothness measure feeds the score.
    anomaly_scores = []
    for c in range(3):  # R, G, B channels
        hist, _ = np.histogram(rgb[:, :, c].ravel(), bins=256, range=(0, 256))
        hist = hist.astype(np.float64)

        # Deviation from a σ=2 Gaussian-smoothed histogram, scaled by the
        # mean bin height (1e-9 guards against an all-zero histogram).
        hist_smooth = gaussian_filter(hist, sigma=2)
        smoothness = float(np.mean(np.abs(hist - hist_smooth)) / (np.mean(hist) + 1e-9))

        anomaly_scores.append(smoothness)

    avg_smoothness = float(np.mean(anomaly_scores))

    if avg_smoothness < 0.3:
        score = -0.2
        note = f"Smooth color histograms (smoothness={avg_smoothness:.3f}, natural)"
    elif avg_smoothness > 0.8:
        score = 0.4
        note = f"Irregular color histograms (smoothness={avg_smoothness:.3f}, manipulation artifact)"
    else:
        score = 0.0
        note = f"Normal histogram smoothness ({avg_smoothness:.3f})"

    return {
        "test": "Color Histogram Analysis",
        "histogram_smoothness": round(avg_smoothness, 4),
        "score": score,
        "note": note,
    }
273
+
274
+
275
# ─── Wavelet Coefficient Distribution ───────────────────────────────
def analyze_wavelet_coefficients(img: "Image.Image") -> Dict[str, Any]:
    """
    Test the kurtosis of high-frequency Haar wavelet coefficients.

    Natural-image wavelet coefficients follow a heavy-tailed generalized
    Gaussian; AI images tend to deviate, especially in high-frequency
    subbands. Uses a hand-rolled one-level Haar transform (no pywt
    dependency needed).

    Returns a dict with the test name, the high-frequency kurtosis, a
    score (negative = natural-leaning, positive = AI-leaning), and a
    human-readable note.
    """
    gray = np.array(img.convert("L")).astype(np.float64)
    h, w = gray.shape
    # Trim to even dimensions so 2x2 neighborhoods tile exactly.
    h2, w2 = h // 2 * 2, w // 2 * 2
    gray = gray[:h2, :w2]

    # One-level Haar decomposition over non-overlapping 2x2 neighborhoods.
    a = gray[0::2, 0::2]
    b = gray[0::2, 1::2]
    c = gray[1::2, 0::2]
    d = gray[1::2, 1::2]
    # Only the detail subbands are needed; the LL approximation subband
    # (a+b+c+d)/4 carries no high-frequency information and is not computed.
    lh = (a + b - c - d) / 4
    hl = (a - b + c - d) / 4
    hh = (a - b - c + d) / 4

    # Pool all high-frequency coefficients, dropping exact zeros
    # (flat regions would otherwise dominate the distribution).
    hf_coeffs = np.concatenate([lh.ravel(), hl.ravel(), hh.ravel()])
    hf_coeffs = hf_coeffs[hf_coeffs != 0]

    if len(hf_coeffs) < 100:
        return {"test": "Wavelet Coefficients", "score": 0.0, "note": "Insufficient data"}

    kurt = float(scipy_kurtosis(hf_coeffs, fisher=True))
    # Natural images: heavy-tailed (high excess kurtosis).
    # AI images: often lower kurtosis (more Gaussian-like).

    if kurt > 5.0:
        score = -0.3
        note = f"Heavy-tailed wavelet coefficients (kurtosis={kurt:.2f}, natural)"
    elif kurt < 1.5:
        score = 0.4
        note = f"Gaussian-like wavelet coefficients (kurtosis={kurt:.2f}, AI-like)"
    else:
        score = 0.0
        note = f"Wavelet kurtosis={kurt:.2f}"

    return {
        "test": "Wavelet Coefficients",
        "hf_kurtosis": round(kurt, 4),
        "score": score,
        "note": note,
    }
321
+
322
+
323
  # ─── Main Agent Entry Point ─────────────────────────────────────────
324
  def run_statistical_agent(img: Image.Image) -> AgentEvidence:
325
  """Run all statistical priors tests."""
326
  findings = []
327
  scores = []
328
 
329
+ for fn in [analyze_dct_distribution, analyze_benford, analyze_gradient_sparsity,
330
+ analyze_local_kurtosis, analyze_color_histogram, analyze_wavelet_coefficients]:
331
  try:
332
  result = fn(img)
333
  findings.append(result)
 
356
  agent_name="Statistical Priors Agent",
357
  violation_score=np.clip(avg_score, -1, 1),
358
  confidence=confidence,
359
+ failure_prob=max(0.0, 1.0 - len(scores) / 6),
360
  rationale=rationale,
361
  sub_findings=findings,
362
  )