Upload soccer_feature_engineering.py
Browse files- soccer_feature_engineering.py +164 -114
soccer_feature_engineering.py
CHANGED
|
@@ -1,157 +1,207 @@
|
|
| 1 |
"""
|
| 2 |
-
Soccer Feature Engineering Pipeline
|
| 3 |
-
====================================
|
| 4 |
Engineers 33 match-level, team-level features from SkillCorner dynamic_events.csv.
|
| 5 |
Based on the Kaggle Soccer Feature Engineering Hackathon requirements.
|
| 6 |
|
| 7 |
-
|
| 8 |
-
|
| 9 |
|
| 10 |
-
|
| 11 |
-
|
| 12 |
"""
|
| 13 |
|
| 14 |
import glob
|
| 15 |
-
import json
|
| 16 |
import os
|
| 17 |
|
| 18 |
-
import numpy as np
|
| 19 |
import pandas as pd
|
| 20 |
|
| 21 |
|
| 22 |
def discover_dynamic_events_files(data_root="/app/opendata/data/matches"):
    """Dynamically discover all *_dynamic_events.csv files via glob.

    Parameters
    ----------
    data_root : str
        Root directory to search under.

    Returns
    -------
    list[str]
        Sorted paths of every ``*_dynamic_events.csv`` found at any depth.

    Bug fix: the original pattern used a single ``*`` directory component
    while also passing ``recursive=True``.  ``recursive`` only has an effect
    on the ``**`` wildcard, so files nested more than one level below
    ``data_root`` were silently missed.  ``**`` matches any number of
    directory levels (including zero components' worth of nesting under
    each match folder).
    """
    pattern = os.path.join(data_root, "**", "*_dynamic_events.csv")
    files = glob.glob(pattern, recursive=True)
    files.sort()
    return files
|
| 28 |
|
| 29 |
|
| 30 |
-
def
|
| 31 |
-
"""Load matches.json to get home/away team mapping."""
|
| 32 |
-
path = os.path.join(data_root, "matches.json")
|
| 33 |
-
with open(path, "r") as f:
|
| 34 |
-
matches = json.load(f)
|
| 35 |
-
mapping = {}
|
| 36 |
-
for m in matches:
|
| 37 |
-
mapping[str(m["id"])] = {
|
| 38 |
-
"home_team_id": m["home_team"]["id"],
|
| 39 |
-
"away_team_id": m["away_team"]["id"],
|
| 40 |
-
"home_team_name": m["home_team"]["short_name"],
|
| 41 |
-
"away_team_name": m["away_team"]["short_name"],
|
| 42 |
-
}
|
| 43 |
-
return mapping
|
| 44 |
-
|
| 45 |
-
|
| 46 |
-
def compute_features_for_match(dynamic_events_path, match_meta=None):
|
| 47 |
"""
|
| 48 |
-
Compute
|
| 49 |
-
|
| 50 |
"""
|
| 51 |
-
df = pd.read_csv(dynamic_events_path, low_memory=False)
|
| 52 |
match_id = df["match_id"].iloc[0]
|
| 53 |
-
team_ids = sorted(df["team_id"].unique().tolist())
|
| 54 |
|
| 55 |
-
|
| 56 |
-
|
| 57 |
-
|
| 58 |
-
|
| 59 |
-
|
| 60 |
-
|
| 61 |
-
|
| 62 |
-
|
| 63 |
-
|
| 64 |
-
|
| 65 |
-
|
| 66 |
-
|
| 67 |
-
|
| 68 |
-
|
| 69 |
-
|
| 70 |
-
|
| 71 |
-
|
| 72 |
-
|
| 73 |
-
|
| 74 |
-
|
| 75 |
-
|
| 76 |
-
|
| 77 |
-
|
| 78 |
-
|
| 79 |
-
|
| 80 |
-
|
| 81 |
-
|
| 82 |
-
|
| 83 |
-
|
| 84 |
-
|
| 85 |
-
|
| 86 |
-
|
| 87 |
-
|
| 88 |
-
|
| 89 |
-
|
| 90 |
-
|
| 91 |
-
|
| 92 |
-
|
| 93 |
-
|
| 94 |
-
|
| 95 |
-
|
| 96 |
-
|
| 97 |
-
|
| 98 |
-
|
| 99 |
-
|
| 100 |
-
|
| 101 |
-
|
| 102 |
-
|
| 103 |
-
|
| 104 |
-
|
| 105 |
-
|
| 106 |
-
|
| 107 |
-
|
| 108 |
-
|
| 109 |
-
|
| 110 |
-
|
| 111 |
-
|
| 112 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 113 |
|
| 114 |
|
| 115 |
def run_pipeline(data_root="/app/opendata/data/matches", output_path="/app/features.csv"):
|
| 116 |
files = discover_dynamic_events_files(data_root)
|
| 117 |
print(f"Discovered {len(files)} dynamic_events.csv files")
|
| 118 |
|
| 119 |
-
|
| 120 |
for f in files:
|
| 121 |
try:
|
| 122 |
-
|
| 123 |
-
|
| 124 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 125 |
except Exception as e:
|
| 126 |
-
print(f"
|
| 127 |
|
| 128 |
-
if not
|
| 129 |
raise ValueError("No valid match files processed.")
|
| 130 |
|
| 131 |
-
features_df = pd.
|
| 132 |
-
|
| 133 |
-
|
| 134 |
-
|
| 135 |
-
|
| 136 |
-
|
| 137 |
-
|
| 138 |
-
|
| 139 |
-
|
| 140 |
-
|
| 141 |
-
|
| 142 |
-
|
| 143 |
-
|
| 144 |
-
|
| 145 |
-
|
| 146 |
-
|
| 147 |
-
|
| 148 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 149 |
|
| 150 |
features_df.to_csv(output_path, index=False)
|
| 151 |
-
print(f"\nWrote {len(features_df)} rows
|
| 152 |
print(f"Shape: {features_df.shape}")
|
| 153 |
return features_df
|
| 154 |
|
| 155 |
|
| 156 |
if __name__ == "__main__":
|
| 157 |
-
run_pipeline()
|
|
|
|
| 1 |
"""
|
| 2 |
+
Soccer Feature Engineering Pipeline - Competition Version
|
| 3 |
+
===========================================================
|
| 4 |
Engineers 33 match-level, team-level features from SkillCorner dynamic_events.csv.
|
| 5 |
Based on the Kaggle Soccer Feature Engineering Hackathon requirements.
|
| 6 |
|
| 7 |
+
This script uses the EXACT column naming convention from the reference notebook
|
| 8 |
+
by Dev0907 to ensure full compatibility with competition evaluation.
|
| 9 |
|
| 10 |
+
Input: SkillCorner opendata repository (or any folder with *_dynamic_events.csv)
|
| 11 |
+
Output: features.csv - one row per team per match
|
| 12 |
"""
|
| 13 |
|
| 14 |
import glob
|
|
|
|
| 15 |
import os
|
| 16 |
|
|
|
|
| 17 |
import pandas as pd
|
| 18 |
|
| 19 |
|
| 20 |
def discover_dynamic_events_files(data_root="/app/opendata/data/matches"):
    """Recursively locate every ``*_dynamic_events.csv`` under *data_root*.

    Returns the matching paths as a lexicographically sorted list; the
    ``**`` component lets ``glob`` descend through any number of
    sub-directories.
    """
    search_glob = os.path.join(data_root, "**", "*_dynamic_events.csv")
    return sorted(glob.glob(search_glob, recursive=True))
|
| 26 |
|
| 27 |
|
| 28 |
+
def compute_team_features(df, team):
    """
    Aggregate the 33 match-level features for one team in one match.

    Parameters
    ----------
    df : pandas.DataFrame
        Full dynamic-events table for a single match (SkillCorner schema;
        must contain the event/possession columns referenced below).
    team :
        A value present in ``df["team_id"]``.

    Returns
    -------
    dict
        ``match_id``, ``team_id`` plus the 33 feature values, keyed with the
        EXACT column names of the Dev0907 reference notebook.
    """
    match_id = df["match_id"].iloc[0]
    events = df[df["team_id"] == team]

    # Per-event-type views of this team's rows.
    poss = events[events["event_type"] == "player_possession"]
    engagements = events[events["event_type"] == "on_ball_engagement"]
    runs = events[events["event_type"] == "off_ball_run"]

    # Possessions that ended in a pass / that contained a carry.
    passes = poss[poss["pass_outcome"].notna()]
    carries = poss[poss["carry"] == True]  # noqa: E712 — NaN-tolerant filter

    def count_where(frame, column, value):
        # Number of rows whose *column* equals *value*.
        return int((frame[column] == value).sum())

    def int_sum(frame, column):
        # Column sum coerced to int (boolean or integer columns).
        return int(frame[column].sum())

    def positive_sum(frame, column):
        # Sum of the positive part of a numeric column, rounded to 2 dp.
        return float(round(frame[column].clip(lower=0).sum(), 2))

    record = {"match_id": match_id, "team_id": team}

    # DIMENSION 1: ATTACKING STRUCTURE
    record["att1_passes_into_final_third"] = count_where(passes, "third_end", "attacking_third")
    record["att2_carries_into_attacking_third"] = count_where(carries, "third_end", "attacking_third")
    record["att3_opponents_bypassed_by_passes"] = int(passes["n_opponents_bypassed"].clip(lower=0).sum())
    record["att4_last_line_break_passes"] = (
        int_sum(passes, "last_line_break") if "last_line_break" in passes.columns else 0
    )
    record["att5_passes_in_attacking_third"] = count_where(passes, "third_start", "attacking_third")

    # DIMENSION 2: BUILD-UP PROFILE
    record["att6_buildup_phase_events"] = count_where(poss, "team_in_possession_phase_type", "build_up")
    record["att7_direct_phase_events"] = count_where(poss, "team_in_possession_phase_type", "direct")
    record["att8_setplay_events"] = count_where(poss, "team_in_possession_phase_type", "set_play")
    record["att9_quickbreak_events"] = count_where(poss, "team_in_possession_phase_type", "quick_break")
    record["att10_transition_events"] = count_where(poss, "team_in_possession_phase_type", "transition")

    # DIMENSION 3: POSSESSION QUALITY
    record["att11_one_touch_passes"] = int_sum(poss, "one_touch")
    record["att12_quick_passes"] = int_sum(poss, "quick_pass")
    record["att13_possessions_leading_to_shot"] = int_sum(poss, "lead_to_shot")
    record["att14_possessions_leading_to_goal"] = int_sum(poss, "lead_to_goal")
    record["att15_def_line_depth_total_pushed_m"] = positive_sum(poss, "delta_to_last_defensive_line_gain")
    record["att16_def_line_height_total_pushed_m"] = positive_sum(poss, "last_defensive_line_height_gain")
    record["att17_forward_momentum_possessions"] = int_sum(poss, "forward_momentum")
    record["att18_passing_options_total"] = int_sum(poss, "n_passing_options")
    record["att19_dangerous_difficult_pass_options"] = int_sum(poss, "n_passing_options_dangerous_difficult")
    record["att20_runs_ahead_of_ball"] = count_where(runs, "event_subtype", "run_ahead_of_the_ball")

    # DIMENSION 4: PRESSING & DEFENSIVE SHAPE
    record["def1_total_defensive_engagements"] = len(engagements)
    record["def2_counter_press_actions"] = count_where(engagements, "event_subtype", "counter_press")
    record["def3_recovery_press_actions"] = count_where(engagements, "event_subtype", "recovery_press")

    # Chain length is repeated on every link; sum it only at chain starts
    # (index 1.0) so each chain contributes its length exactly once.
    chains = engagements[engagements["pressing_chain_length"].notna()]
    chain_starts = chains[chains["index_in_pressing_chain"] == 1.0]
    record["def4_pressing_chain_total_length"] = int(chain_starts["pressing_chain_length"].sum())
    record["def5_pressing_chains_initiated"] = len(chain_starts)
    record["def6_max_pressing_chain_length"] = 0 if chains.empty else int(chains["pressing_chain_length"].max())
    record["def7_danger_stopped"] = int_sum(engagements, "stop_possession_danger")

    # DIMENSION 5: OFF-BALL MOVEMENT INTELLIGENCE
    record["run1_line_breaking_runs"] = int_sum(runs, "break_defensive_line")
    record["run2_line_pushing_runs"] = int_sum(runs, "push_defensive_line")
    record["run3_runs_behind_defense"] = count_where(runs, "event_subtype", "behind")
    record["run4_overlap_runs"] = count_where(runs, "event_subtype", "overlap")
    record["run5_attacking_third_runs"] = count_where(runs, "third_start", "attacking_third")
    record["att_give_and_go_initiated"] = (
        int_sum(poss, "initiate_give_and_go") if "initiate_give_and_go" in poss.columns else 0
    )

    return record
|
| 136 |
|
| 137 |
|
| 138 |
def run_pipeline(data_root="/app/opendata/data/matches", output_path="/app/features.csv"):
    """
    End-to-end pipeline: discover every dynamic-events CSV under *data_root*,
    build one feature row per team per match, and write the table to
    *output_path*.

    A file that fails to parse is reported and skipped so one bad match
    cannot abort the whole run.

    Returns
    -------
    pandas.DataFrame
        The feature table that was written (one row per team per match).

    Raises
    ------
    ValueError
        If no file could be processed at all.
    """
    event_files = discover_dynamic_events_files(data_root)
    print(f"Discovered {len(event_files)} dynamic_events.csv files")

    rows = []
    for path in event_files:
        try:
            match_events = pd.read_csv(path, low_memory=False)
            match_id = match_events["match_id"].iloc[0]
            team_ids = sorted(match_events["team_id"].unique().tolist())
            for team_id in team_ids:
                rows.append(compute_team_features(match_events, team_id))
            print(f" match {match_id}: {len(team_ids)} teams")
        except Exception as err:  # best-effort: report and move on
            print(f" ERROR {os.path.basename(path)}: {err}")

    if not rows:
        raise ValueError("No valid match files processed.")

    features_df = pd.DataFrame(rows)

    # Pin the column order to the reference notebook's exact layout.
    col_order = [
        "match_id", "team_id",
        # attacking structure
        "att1_passes_into_final_third", "att2_carries_into_attacking_third",
        "att3_opponents_bypassed_by_passes", "att4_last_line_break_passes",
        "att5_passes_in_attacking_third",
        # build-up profile
        "att6_buildup_phase_events", "att7_direct_phase_events",
        "att8_setplay_events", "att9_quickbreak_events", "att10_transition_events",
        # possession quality
        "att11_one_touch_passes", "att12_quick_passes",
        "att13_possessions_leading_to_shot", "att14_possessions_leading_to_goal",
        "att15_def_line_depth_total_pushed_m", "att16_def_line_height_total_pushed_m",
        "att17_forward_momentum_possessions", "att18_passing_options_total",
        "att19_dangerous_difficult_pass_options", "att20_runs_ahead_of_ball",
        # defensive pressing
        "def1_total_defensive_engagements", "def2_counter_press_actions",
        "def3_recovery_press_actions", "def4_pressing_chain_total_length",
        "def5_pressing_chains_initiated", "def6_max_pressing_chain_length",
        "def7_danger_stopped",
        # off-ball movement
        "run1_line_breaking_runs", "run2_line_pushing_runs",
        "run3_runs_behind_defense", "run4_overlap_runs", "run5_attacking_third_runs",
        "att_give_and_go_initiated",
    ]
    features_df = features_df[col_order]

    features_df.to_csv(output_path, index=False)
    print(f"\nWrote {len(features_df)} rows x {len(features_df.columns)} columns to {output_path}")
    print(f"Shape: {features_df.shape}")
    return features_df


if __name__ == "__main__":
    run_pipeline()
|