# integration_test2/evaluation/run_channel_ablation_only.py
# Author: Abdelrahman Almatrooshi
# Project: FocusGuard with L2CS-Net gaze estimation
# Commit: 7b53d75
"""Run only channel ablation LOPO (no leave-one-out). Quick run for paper data."""
import os
import sys
import numpy as np
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import f1_score
from xgboost import XGBClassifier
_PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
sys.path.insert(0, _PROJECT_ROOT)
from data_preparation.prepare_dataset import load_per_person, SELECTED_FEATURES
# Shared random_state so every XGBoost fit is reproducible across runs.
SEED = 42
# Column names (in dataset column order) for the face_orientation feature set.
FEATURES = SELECTED_FEATURES["face_orientation"]
# Ablation channels: each entry lists the feature columns kept for that run.
# Names must all appear in FEATURES (indices are resolved via FEATURES.index).
CHANNEL_SUBSETS = {
    "head_pose": ["head_deviation", "s_face", "pitch"],
    "eye_state": ["ear_left", "ear_avg", "ear_right", "perclos"],
    "gaze": ["h_gaze", "gaze_offset", "s_eye"],
}
def _make_model():
    """Fresh XGBoost classifier with the fixed hyperparameters used in the paper."""
    return XGBClassifier(
        n_estimators=600,
        max_depth=8,
        learning_rate=0.05,
        subsample=0.8,
        colsample_bytree=0.8,
        reg_alpha=0.1,
        reg_lambda=1.0,
        eval_metric="logloss",
        random_state=SEED,
        verbosity=0,
    )


def _lopo_mean_f1(by_person, persons, idx_keep):
    """Mean weighted F1 over leave-one-person-out folds.

    Args:
        by_person: dict mapping person id -> (X, y) numpy arrays.
        persons: ordered list of person ids (fold order).
        idx_keep: list of column indices to keep, or None to use all columns.

    Returns:
        Mean of the per-fold weighted F1 scores (numpy scalar).
    """
    f1s = []
    for held_out in persons:
        # Train on everyone except the held-out person.
        X_tr = np.concatenate([by_person[p][0] for p in persons if p != held_out])
        y_tr = np.concatenate([by_person[p][1] for p in persons if p != held_out])
        X_te, y_te = by_person[held_out]
        if idx_keep is not None:
            X_tr = X_tr[:, idx_keep]
            X_te = X_te[:, idx_keep]
        # Fit the scaler on training folds only — no leakage from the test person.
        scaler = StandardScaler().fit(X_tr)
        model = _make_model()
        model.fit(scaler.transform(X_tr), y_tr)
        pred = model.predict(scaler.transform(X_te))
        # Weighted F1 handles class imbalance in the per-person labels.
        f1s.append(f1_score(y_te, pred, average="weighted"))
    return np.mean(f1s)


def main():
    """Run channel-ablation LOPO evaluation and print per-subset mean weighted F1.

    Evaluates each channel subset in CHANNEL_SUBSETS plus an all-features
    baseline ("all_10"), using leave-one-person-out cross-validation.

    Returns:
        dict mapping subset name -> mean LOPO weighted F1.
    """
    by_person, _, _ = load_per_person("face_orientation")
    persons = sorted(by_person.keys())
    results = {}
    for subset_name, feat_list in CHANNEL_SUBSETS.items():
        # Resolve feature names to column indices once per subset.
        idx_keep = [FEATURES.index(f) for f in feat_list]
        results[subset_name] = _lopo_mean_f1(by_person, persons, idx_keep)
        print(f"{subset_name}: {results[subset_name]:.4f}")
    # Baseline: all features, no column subsetting.
    results["all_10"] = _lopo_mean_f1(by_person, persons, None)
    print(f"all_10: {results['all_10']:.4f}")
    return results
# Script entry point: run the ablation study when executed directly.
if __name__ == "__main__":
    main()