knoxel committed on
Commit
56d87f4
·
verified ·
1 Parent(s): 3d8f544

Upload soccer_feature_engineering.py

Browse files
Files changed (1) hide show
  1. soccer_feature_engineering.py +164 -114
soccer_feature_engineering.py CHANGED
@@ -1,157 +1,207 @@
1
  """
2
- Soccer Feature Engineering Pipeline
3
- ====================================
4
  Engineers 33 match-level, team-level features from SkillCorner dynamic_events.csv.
5
  Based on the Kaggle Soccer Feature Engineering Hackathon requirements.
6
 
7
- Input: SkillCorner opendata repository (or any folder with *_dynamic_events.csv)
8
- Output: features.csv one row per team per match, 35 columns (match_id, team_id + 33 features)
9
 
10
- All features are raw aggregated counts or cumulative distances no percentages,
11
- no ratios, fully competition-compliant.
12
  """
13
 
14
  import glob
15
- import json
16
  import os
17
 
18
- import numpy as np
19
  import pandas as pd
20
 
21
 
22
def discover_dynamic_events_files(data_root="/app/opendata/data/matches"):
    """Discover all *_dynamic_events.csv files under *data_root*.

    Returns a sorted list of file paths so processing order is deterministic.
    """
    # BUG FIX: the original pattern used a single "*" directory level, which
    # makes recursive=True a no-op and silently misses files nested more than
    # one level deep. "**" with recursive=True matches at any depth, including
    # the one-level layout the old pattern handled.
    pattern = os.path.join(data_root, "**", "*_dynamic_events.csv")
    files = glob.glob(pattern, recursive=True)
    files.sort()
    return files
28
 
29
 
30
def load_matches_metadata(data_root="/app/opendata/data"):
    """Load matches.json and build a home/away team mapping per match.

    Parameters
    ----------
    data_root : str
        Directory containing ``matches.json``.

    Returns
    -------
    dict
        Keyed by match id (as ``str``) with ``home_team_id``,
        ``away_team_id``, ``home_team_name`` and ``away_team_name``.
        Keys are stringified because match ids elsewhere come from
        filenames/CSV and may be strings.
    """
    path = os.path.join(data_root, "matches.json")
    with open(path, "r") as f:
        matches = json.load(f)
    mapping = {}
    for m in matches:
        mapping[str(m["id"])] = {
            "home_team_id": m["home_team"]["id"],
            "away_team_id": m["away_team"]["id"],
            "home_team_name": m["home_team"]["short_name"],
            "away_team_name": m["away_team"]["short_name"],
        }
    return mapping
44
-
45
-
46
def compute_features_for_match(dynamic_events_path, match_meta=None):
    """
    Compute all 33 features for a single match's dynamic_events.csv.

    Parameters
    ----------
    dynamic_events_path : str
        Path to one SkillCorner *_dynamic_events.csv file.
    match_meta : dict, optional
        Unused here; accepted for interface compatibility with callers that
        pass per-match metadata.

    Returns
    -------
    pd.DataFrame
        One row per team, columns ``match_id``, ``team_id`` plus 33 features.
    """
    df = pd.read_csv(dynamic_events_path, low_memory=False)
    match_id = df["match_id"].iloc[0]
    team_ids = sorted(df["team_id"].unique().tolist())

    records = []
    for team_id in team_ids:
        rec = {"match_id": match_id, "team_id": int(team_id)}

        # Per-team event slices by SkillCorner event_type.
        pp = df[(df["event_type"] == "player_possession") & (df["team_id"] == team_id)].copy()
        obe = df[(df["event_type"] == "on_ball_engagement") & (df["team_id"] == team_id)].copy()
        obr = df[(df["event_type"] == "off_ball_run") & (df["team_id"] == team_id)].copy()

        # DIMENSION 1 - ATTACKING STRUCTURE (att1-att5)
        # A possession row is a pass iff pass_outcome is populated.
        passes = pp[pp["pass_outcome"].notna()]
        rec["att1"] = int((passes["third_end"] == "attacking_third").sum())
        rec["att2"] = int(((pp["carry"] == True) & (pp["third_end"] == "attacking_third")).sum())
        pass_opp_bypassed = passes["n_opponents_bypassed"].fillna(0).clip(lower=0)
        rec["att3"] = float(pass_opp_bypassed.sum())
        rec["att4"] = int((passes["last_line_break"] == True).sum())
        rec["att5"] = int((passes["third_start"] == "attacking_third").sum())

        # DIMENSION 2 - BUILD-UP PROFILE (att6-att10)
        phase_counts = pp["team_in_possession_phase_type"].value_counts()
        rec["att6"] = int(phase_counts.get("build_up", 0))
        rec["att7"] = int(phase_counts.get("direct", 0))
        rec["att8"] = int(phase_counts.get("set_play", 0))
        rec["att9"] = int(phase_counts.get("quick_break", 0))
        rec["att10"] = int(phase_counts.get("transition", 0))

        # DIMENSION 3 - POSSESSION QUALITY (att11-att20)
        rec["att11"] = int((pp["one_touch"] == True).sum())
        rec["att12"] = int((pp["quick_pass"] == True).sum())
        # BUG FIX: use the "== True" idiom (as the surrounding lines do)
        # instead of a bare .sum(); on object-dtype columns containing NaN a
        # bare sum can propagate NaN and int(NaN) raises.
        rec["att13"] = int((pp["lead_to_shot"] == True).sum())
        rec["att14"] = int((pp["lead_to_goal"] == True).sum())
        rec["att15"] = float(pp["delta_to_last_defensive_line_gain"].fillna(0).clip(lower=0).sum())
        rec["att16"] = float(pp["last_defensive_line_height_gain"].fillna(0).clip(lower=0).sum())
        rec["att17"] = int((pp["forward_momentum"] == True).sum())
        rec["att18"] = float(pp["n_passing_options"].fillna(0).sum())
        rec["att19"] = float(pp["n_passing_options_dangerous_difficult"].fillna(0).sum())
        rec["att20"] = int((obr["event_subtype"] == "run_ahead_of_the_ball").sum())

        # DIMENSION 4 - PRESSING & DEFENSIVE SHAPE (def1-def7)
        rec["def1"] = int(len(obe))
        rec["def2"] = int((obe["event_subtype"] == "counter_press").sum())
        rec["def3"] = int((obe["event_subtype"] == "recovery_press").sum())
        # index_in_pressing_chain == 1.0 marks the initiating engagement.
        chain_starts = obe[obe["index_in_pressing_chain"] == 1.0]
        rec["def4"] = float(chain_starts["pressing_chain_length"].fillna(0).sum())
        rec["def5"] = int(len(chain_starts))
        # BUG FIX: max() over an all-NaN column returns NaN; coerce to 0 so
        # downstream integer casts cannot fail.
        max_chain = obe["pressing_chain_length"].max() if len(obe) > 0 else 0
        rec["def6"] = float(0 if pd.isna(max_chain) else max_chain)
        rec["def7"] = int((obe["stop_possession_danger"] == True).sum())

        # DIMENSION 5 - OFF-BALL MOVEMENT INTELLIGENCE (run1-run5 + gng)
        rec["run1"] = int((obr["break_defensive_line"] == True).sum())
        rec["run2"] = int((obr["push_defensive_line"] == True).sum())
        rec["run3"] = int((obr["event_subtype"] == "behind").sum())
        rec["run4"] = int((obr["event_subtype"] == "overlap").sum())
        rec["run5"] = int((obr["third_start"] == "attacking_third").sum())
        rec["att_give_and_go_initiated"] = int((pp["initiate_give_and_go"] == True).sum())

        records.append(rec)

    return pd.DataFrame(records)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
113
 
114
 
115
def run_pipeline(data_root="/app/opendata/data/matches", output_path="/app/features.csv"):
    """Run the end-to-end feature pipeline.

    Discovers all dynamic_events files under *data_root*, computes per-team
    features for each match, orders columns, normalizes whole-number float
    columns to int, and writes the result to *output_path*.

    Raises
    ------
    ValueError
        If no match file could be processed successfully.
    """
    files = discover_dynamic_events_files(data_root)
    print(f"Discovered {len(files)} dynamic_events.csv files")

    all_dfs = []
    for f in files:
        try:
            match_df = compute_features_for_match(f)
            all_dfs.append(match_df)
            print(f" \u2713 {os.path.basename(f)} -> {len(match_df)} rows")
        except Exception as e:
            # Best-effort: a single corrupt match file must not kill the run.
            print(f" {os.path.basename(f)} -> ERROR: {e}")

    if not all_dfs:
        raise ValueError("No valid match files processed.")

    features_df = pd.concat(all_dfs, ignore_index=True)

    feature_cols = (
        [f"att{i}" for i in range(1, 21)] +
        [f"def{i}" for i in range(1, 8)] +
        [f"run{i}" for i in range(1, 6)] +
        ["att_give_and_go_initiated"]
    )
    ordered_cols = ["match_id", "team_id"] + feature_cols
    features_df = features_df[[c for c in ordered_cols if c in features_df.columns]]

    # Collapse float columns that hold only whole numbers down to int,
    # otherwise round to 2 decimals.
    for col in features_df.columns:
        if features_df[col].dtype == float:
            rounded = features_df[col].round(2)
            # BUG FIX: astype(int) raises on NaN, so verify the column is
            # fully populated before attempting the integer-cast check.
            if rounded.notna().all() and (rounded == rounded.astype(int)).all():
                features_df[col] = rounded.astype(int)
            else:
                features_df[col] = rounded

    features_df.to_csv(output_path, index=False)
    print(f"\nWrote {len(features_df)} rows × {len(features_df.columns)} columns to {output_path}")
    print(f"Shape: {features_df.shape}")
    return features_df


if __name__ == "__main__":
    run_pipeline()
 
1
  """
2
+ Soccer Feature Engineering Pipeline - Competition Version
3
+ ===========================================================
4
  Engineers 33 match-level, team-level features from SkillCorner dynamic_events.csv.
5
  Based on the Kaggle Soccer Feature Engineering Hackathon requirements.
6
 
7
+ This script uses the EXACT column naming convention from the reference notebook
8
+ by Dev0907 to ensure full compatibility with competition evaluation.
9
 
10
+ Input: SkillCorner opendata repository (or any folder with *_dynamic_events.csv)
11
+ Output: features.csv - one row per team per match
12
  """
13
 
14
  import glob
 
15
  import os
16
 
 
17
  import pandas as pd
18
 
19
 
20
def discover_dynamic_events_files(data_root="/app/opendata/data/matches"):
    """Recursively discover all *_dynamic_events.csv files under *data_root*.

    The ``"**"`` component combined with ``recursive=True`` matches at any
    directory depth. Results are sorted so processing order (and therefore
    output row order) is deterministic across runs.
    """
    pattern = os.path.join(data_root, "**", "*_dynamic_events.csv")
    files = glob.glob(pattern, recursive=True)
    files.sort()
    return files
26
 
27
 
28
def compute_team_features(df, team):
    """
    Compute 33 aggregated match-level features for a single team in a single match.

    Parameters
    ----------
    df : pd.DataFrame
        Full dynamic_events table for one match (all teams).
    team :
        A value from ``df["team_id"]`` selecting the team to aggregate.

    Returns
    -------
    dict
        ``match_id``, ``team_id`` plus the 33 features, using the EXACT
        column names from the reference notebook by Dev0907.
    """
    match_id = df["match_id"].iloc[0]

    # Per-team event slices by SkillCorner event_type.
    team_df = df[df["team_id"] == team]
    poss = team_df[team_df["event_type"] == "player_possession"]
    obe = team_df[team_df["event_type"] == "on_ball_engagement"]
    obr = team_df[team_df["event_type"] == "off_ball_run"]

    # A possession row is a pass iff pass_outcome is populated.
    passes = poss[poss["pass_outcome"].notna()]

    # DIMENSION 1: ATTACKING STRUCTURE
    att1 = int((passes["third_end"] == "attacking_third").sum())

    carries = poss[poss["carry"] == True]
    att2 = int((carries["third_end"] == "attacking_third").sum())

    # fillna(0) so missing counts contribute 0; clip discards negatives.
    att3 = int(passes["n_opponents_bypassed"].fillna(0).clip(lower=0).sum())

    # ROBUSTNESS FIX (applied to all boolean-flag features below): count with
    # "== True" instead of a bare .sum(); on object-dtype columns containing
    # NaN, a bare sum can propagate NaN and int(NaN) raises.
    att4 = int((passes["last_line_break"] == True).sum()) if "last_line_break" in passes.columns else 0

    att5 = int((passes["third_start"] == "attacking_third").sum())

    # DIMENSION 2: BUILD-UP PROFILE
    phase = poss["team_in_possession_phase_type"]
    att6 = int((phase == "build_up").sum())
    att7 = int((phase == "direct").sum())
    att8 = int((phase == "set_play").sum())
    att9 = int((phase == "quick_break").sum())
    att10 = int((phase == "transition").sum())

    # DIMENSION 3: POSSESSION QUALITY
    att11 = int((poss["one_touch"] == True).sum())
    att12 = int((poss["quick_pass"] == True).sum())
    att13 = int((poss["lead_to_shot"] == True).sum())
    att14 = int((poss["lead_to_goal"] == True).sum())
    att15 = float(round(poss["delta_to_last_defensive_line_gain"].fillna(0).clip(lower=0).sum(), 2))
    att16 = float(round(poss["last_defensive_line_height_gain"].fillna(0).clip(lower=0).sum(), 2))
    att17 = int((poss["forward_momentum"] == True).sum())
    att18 = int(poss["n_passing_options"].fillna(0).sum())
    att19 = int(poss["n_passing_options_dangerous_difficult"].fillna(0).sum())
    att20 = int((obr["event_subtype"] == "run_ahead_of_the_ball").sum())

    # DIMENSION 4: PRESSING & DEFENSIVE SHAPE
    def1 = int(len(obe))
    def2 = int((obe["event_subtype"] == "counter_press").sum())
    def3 = int((obe["event_subtype"] == "recovery_press").sum())

    # index_in_pressing_chain == 1.0 marks the initiating engagement.
    chains = obe[obe["pressing_chain_length"].notna()]
    chain_starts = chains[chains["index_in_pressing_chain"] == 1.0]
    def4 = int(chain_starts["pressing_chain_length"].sum())
    def5 = int(len(chain_starts))
    # chains is pre-filtered to notna, so max() here is NaN-safe.
    def6 = int(chains["pressing_chain_length"].max()) if len(chains) > 0 else 0

    def7 = int((obe["stop_possession_danger"] == True).sum())

    # DIMENSION 5: OFF-BALL MOVEMENT INTELLIGENCE
    run1 = int((obr["break_defensive_line"] == True).sum())
    run2 = int((obr["push_defensive_line"] == True).sum())
    run3 = int((obr["event_subtype"] == "behind").sum())
    run4 = int((obr["event_subtype"] == "overlap").sum())
    run5 = int((obr["third_start"] == "attacking_third").sum())
    gng = int((poss["initiate_give_and_go"] == True).sum()) if "initiate_give_and_go" in poss.columns else 0

    return {
        "match_id": match_id,
        "team_id": team,
        # Attacking Structure
        "att1_passes_into_final_third": att1,
        "att2_carries_into_attacking_third": att2,
        "att3_opponents_bypassed_by_passes": att3,
        "att4_last_line_break_passes": att4,
        "att5_passes_in_attacking_third": att5,
        # Build-Up Profile
        "att6_buildup_phase_events": att6,
        "att7_direct_phase_events": att7,
        "att8_setplay_events": att8,
        "att9_quickbreak_events": att9,
        "att10_transition_events": att10,
        # Possession Quality
        "att11_one_touch_passes": att11,
        "att12_quick_passes": att12,
        "att13_possessions_leading_to_shot": att13,
        "att14_possessions_leading_to_goal": att14,
        "att15_def_line_depth_total_pushed_m": att15,
        "att16_def_line_height_total_pushed_m": att16,
        "att17_forward_momentum_possessions": att17,
        "att18_passing_options_total": att18,
        "att19_dangerous_difficult_pass_options": att19,
        "att20_runs_ahead_of_ball": att20,
        # Defensive Pressing
        "def1_total_defensive_engagements": def1,
        "def2_counter_press_actions": def2,
        "def3_recovery_press_actions": def3,
        "def4_pressing_chain_total_length": def4,
        "def5_pressing_chains_initiated": def5,
        "def6_max_pressing_chain_length": def6,
        "def7_danger_stopped": def7,
        # Off-Ball Movement
        "run1_line_breaking_runs": run1,
        "run2_line_pushing_runs": run2,
        "run3_runs_behind_defense": run3,
        "run4_overlap_runs": run4,
        "run5_attacking_third_runs": run5,
        "att_give_and_go_initiated": gng,
    }
136
 
137
 
138
def run_pipeline(data_root="/app/opendata/data/matches", output_path="/app/features.csv"):
    """Run the end-to-end feature pipeline.

    Discovers all dynamic_events files under *data_root*, computes one
    feature record per (match, team), enforces the reference notebook's
    exact column order, and writes the result to *output_path*.

    Raises
    ------
    ValueError
        If no match file could be processed successfully.
    """
    files = discover_dynamic_events_files(data_root)
    print(f"Discovered {len(files)} dynamic_events.csv files")

    records = []
    for f in files:
        try:
            df = pd.read_csv(f, low_memory=False)
            match_id = df["match_id"].iloc[0]
            teams = sorted(df["team_id"].unique().tolist())
            for team in teams:
                features = compute_team_features(df, team)
                records.append(features)
            print(f" match {match_id}: {len(teams)} teams")
        except Exception as e:
            # Best-effort: a single corrupt match file must not kill the run.
            print(f" ERROR {os.path.basename(f)}: {e}")

    if not records:
        raise ValueError("No valid match files processed.")

    features_df = pd.DataFrame(records)

    # Ensure exact column order as in reference notebook
    col_order = [
        "match_id", "team_id",
        "att1_passes_into_final_third",
        "att2_carries_into_attacking_third",
        "att3_opponents_bypassed_by_passes",
        "att4_last_line_break_passes",
        "att5_passes_in_attacking_third",
        "att6_buildup_phase_events",
        "att7_direct_phase_events",
        "att8_setplay_events",
        "att9_quickbreak_events",
        "att10_transition_events",
        "att11_one_touch_passes",
        "att12_quick_passes",
        "att13_possessions_leading_to_shot",
        "att14_possessions_leading_to_goal",
        "att15_def_line_depth_total_pushed_m",
        "att16_def_line_height_total_pushed_m",
        "att17_forward_momentum_possessions",
        "att18_passing_options_total",
        "att19_dangerous_difficult_pass_options",
        "att20_runs_ahead_of_ball",
        "def1_total_defensive_engagements",
        "def2_counter_press_actions",
        "def3_recovery_press_actions",
        "def4_pressing_chain_total_length",
        "def5_pressing_chains_initiated",
        "def6_max_pressing_chain_length",
        "def7_danger_stopped",
        "run1_line_breaking_runs",
        "run2_line_pushing_runs",
        "run3_runs_behind_defense",
        "run4_overlap_runs",
        "run5_attacking_third_runs",
        "att_give_and_go_initiated",
    ]

    # Every key is guaranteed by compute_team_features, so plain reindexing
    # is safe and will loudly KeyError if the contract is ever broken.
    features_df = features_df[col_order]

    features_df.to_csv(output_path, index=False)
    print(f"\nWrote {len(features_df)} rows x {len(features_df.columns)} columns to {output_path}")
    print(f"Shape: {features_df.shape}")
    return features_df


if __name__ == "__main__":
    run_pipeline()