File size: 1,322 Bytes
ad1bffa
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
"""Example usage of the ANCHOR+DEUP Unified UQ Pipeline."""

from sklearn.datasets import fetch_california_housing
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from anchor_deup import UnifiedUQPipeline
import numpy as np

def main() -> None:
    """Run the ANCHOR+DEUP unified UQ pipeline end-to-end on California Housing.

    Loads the dataset, makes a 49/21/30 train/val/test split, standardizes
    features and target (fit on train only), trains the pipeline, and prints
    the shapes of the mean prediction and the uncertainty decomposition.
    """
    # Load data; target reshaped to (n, 1) so StandardScaler accepts it.
    data = fetch_california_housing()
    X = data.data.astype(np.float64)
    y = data.target.reshape(-1, 1).astype(np.float64)

    # Split: 30% held out for test, then 30% of the remainder for validation.
    Xt, X_test, yt, y_test = train_test_split(X, y, test_size=0.3, random_state=42)
    X_train, X_val, y_train, y_val = train_test_split(Xt, yt, test_size=0.3, random_state=42)

    # Scale features: fit on train only to avoid leaking val/test statistics.
    sx = StandardScaler()
    X_train = sx.fit_transform(X_train)
    X_val = sx.transform(X_val)
    X_test = sx.transform(X_test)

    # Scale target the same way.
    # NOTE(review): y_test is scaled but never used below — kept for parity
    # with the splits; confirm whether downstream evaluation needs it.
    sy = StandardScaler()
    y_train = sy.fit_transform(y_train)
    y_val = sy.transform(y_val)
    y_test = sy.transform(y_test)

    # Train pipeline on the standardized data.
    pipeline = UnifiedUQPipeline(X_train.shape[1])
    pipeline.fit(X_train, y_train, X_val, y_val, main_epochs=150, error_epochs=150)

    # Predict with uncertainty decomposition (total = epistemic + aleatoric).
    mean_pred, total_unc, epistemic_unc, aleatoric_unc = pipeline.predict(X_test, n_anchors=100)

    print(f"Mean prediction shape: {mean_pred.shape}")
    print(f"Total uncertainty shape: {total_unc.shape}")
    print(f"Epistemic uncertainty shape: {epistemic_unc.shape}")
    print(f"Aleatoric uncertainty shape: {aleatoric_unc.shape}")


if __name__ == "__main__":
    # Guard so importing this example module does not trigger a training run.
    main()