knoxel committed on
Commit
f1da51e
·
verified ·
1 Parent(s): cd884e8

Upload soccer_feature_engineering.py

Browse files
Files changed (1) hide show
  1. soccer_feature_engineering.py +157 -1
soccer_feature_engineering.py CHANGED
@@ -1 +1,157 @@
1
- See /app/soccer_feature_engineering.py
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Soccer Feature Engineering Pipeline
3
+ ====================================
4
+ Engineers 33 match-level, team-level features from SkillCorner dynamic_events.csv.
5
+ Based on the Kaggle Soccer Feature Engineering Hackathon requirements.
6
+
7
+ Input: SkillCorner opendata repository (or any folder with *_dynamic_events.csv)
8
+ Output: features.csv — one row per team per match, 35 columns (match_id, team_id + 33 features)
9
+
10
+ All features are raw aggregated counts or cumulative distances — no percentages,
11
+ no ratios, fully competition-compliant.
12
+ """
13
+
14
+ import glob
15
+ import json
16
+ import os
17
+
18
+ import numpy as np
19
+ import pandas as pd
20
+
21
+
22
def discover_dynamic_events_files(data_root="/app/opendata/data/matches"):
    """
    Discover all ``*_dynamic_events.csv`` files under *data_root*.

    Fix: the original paired ``recursive=True`` with a single-level ``*``
    wildcard, which makes the flag a no-op (``glob`` only recurses on
    ``**``). Using ``**`` makes discovery genuinely recursive, so match
    folders nested at any depth — including directly under *data_root* —
    are found. For the expected one-level layout the results are unchanged.

    Parameters
    ----------
    data_root : str
        Root folder to search.

    Returns
    -------
    list[str]
        Sorted list of matching file paths (sorted for deterministic order).
    """
    pattern = os.path.join(data_root, "**", "*_dynamic_events.csv")
    files = glob.glob(pattern, recursive=True)
    files.sort()
    return files
28
+
29
+
30
def load_matches_metadata(data_root="/app/opendata/data"):
    """
    Read matches.json from *data_root* and build a match-id lookup.

    Returns a dict keyed by the match id as a string, whose values hold
    the home/away team ids and short names for that match.
    """
    meta_path = os.path.join(data_root, "matches.json")
    with open(meta_path, "r") as fh:
        match_list = json.load(fh)

    team_map = {}
    for match in match_list:
        home = match["home_team"]
        away = match["away_team"]
        team_map[str(match["id"])] = {
            "home_team_id": home["id"],
            "away_team_id": away["id"],
            "home_team_name": home["short_name"],
            "away_team_name": away["short_name"],
        }
    return team_map
44
+
45
+
46
def compute_features_for_match(dynamic_events_path, match_meta=None):
    """
    Compute all 33 features for a single match's dynamic_events.csv.
    Returns a DataFrame with one row per team.

    Parameters
    ----------
    dynamic_events_path : str
        Path to a SkillCorner ``*_dynamic_events.csv`` file. Expected to
        contain at least the columns read below (event_type, team_id,
        match_id, plus the per-dimension feature columns).
    match_meta : dict, optional
        Unused in the current implementation; presumably reserved for the
        home/away mapping from ``load_matches_metadata`` — TODO confirm.

    Returns
    -------
    pandas.DataFrame
        One row per distinct team_id in the file; columns are match_id,
        team_id and the 33 engineered features (att1-att20, def1-def7,
        run1-run5, att_give_and_go_initiated). All values are raw counts
        or cumulative sums — no ratios.
    """
    df = pd.read_csv(dynamic_events_path, low_memory=False)
    # All rows in one file share a match_id; take it from the first row.
    match_id = df["match_id"].iloc[0]
    # Sorted for deterministic row order in the output.
    team_ids = sorted(df["team_id"].unique().tolist())

    records = []
    for team_id in team_ids:
        rec = {"match_id": match_id, "team_id": int(team_id)}

        # Per-team event slices: on-ball possessions, defensive engagements,
        # and off-ball runs. .copy() avoids SettingWithCopy warnings downstream.
        pp = df[(df["event_type"] == "player_possession") & (df["team_id"] == team_id)].copy()
        obe = df[(df["event_type"] == "on_ball_engagement") & (df["team_id"] == team_id)].copy()
        obr = df[(df["event_type"] == "off_ball_run") & (df["team_id"] == team_id)].copy()

        # DIMENSION 1 — ATTACKING STRUCTURE (att1–att5)
        # A possession row with a non-null pass_outcome is treated as a pass.
        passes = pp[pp["pass_outcome"].notna()]
        rec["att1"] = int((passes["third_end"] == "attacking_third").sum())
        rec["att2"] = int(((pp["carry"] == True) & (pp["third_end"] == "attacking_third")).sum())
        # Missing/negative bypass counts are treated as 0 before summing.
        pass_opp_bypassed = passes["n_opponents_bypassed"].fillna(0).clip(lower=0)
        rec["att3"] = float(pass_opp_bypassed.sum())
        rec["att4"] = int((passes["last_line_break"] == True).sum())
        rec["att5"] = int((passes["third_start"] == "attacking_third").sum())

        # DIMENSION 2 — BUILD-UP PROFILE (att6–att10)
        # Counts of possession events per phase type; .get(..., 0) covers
        # phases absent from this match.
        phase_counts = pp["team_in_possession_phase_type"].value_counts()
        rec["att6"] = int(phase_counts.get("build_up", 0))
        rec["att7"] = int(phase_counts.get("direct", 0))
        rec["att8"] = int(phase_counts.get("set_play", 0))
        rec["att9"] = int(phase_counts.get("quick_break", 0))
        rec["att10"] = int(phase_counts.get("transition", 0))

        # DIMENSION 3 — POSSESSION QUALITY (att11–att20)
        rec["att11"] = int((pp["one_touch"] == True).sum())
        rec["att12"] = int((pp["quick_pass"] == True).sum())
        # NOTE(review): lead_to_shot / lead_to_goal are summed directly —
        # assumes they are boolean/0-1 flags with no NaN; verify against data.
        rec["att13"] = int(pp["lead_to_shot"].sum())
        rec["att14"] = int(pp["lead_to_goal"].sum())
        # Only positive territorial gains are accumulated (clip at 0).
        rec["att15"] = float(pp["delta_to_last_defensive_line_gain"].fillna(0).clip(lower=0).sum())
        rec["att16"] = float(pp["last_defensive_line_height_gain"].fillna(0).clip(lower=0).sum())
        rec["att17"] = int((pp["forward_momentum"] == True).sum())
        rec["att18"] = float(pp["n_passing_options"].fillna(0).sum())
        rec["att19"] = float(pp["n_passing_options_dangerous_difficult"].fillna(0).sum())
        rec["att20"] = int((obr["event_subtype"] == "run_ahead_of_the_ball").sum())

        # DIMENSION 4 — PRESSING & DEFENSIVE SHAPE (def1–def7)
        rec["def1"] = int(len(obe))
        rec["def2"] = int((obe["event_subtype"] == "counter_press").sum())
        rec["def3"] = int((obe["event_subtype"] == "recovery_press").sum())
        # index_in_pressing_chain == 1.0 presumably marks the first event of
        # a pressing chain — each such row counts one chain; TODO confirm.
        chain_starts = obe[obe["index_in_pressing_chain"] == 1.0]
        rec["def4"] = float(chain_starts["pressing_chain_length"].fillna(0).sum())
        rec["def5"] = int(len(chain_starts))
        # NOTE(review): if obe is non-empty but pressing_chain_length is all
        # NaN, max() yields NaN here — confirm downstream tolerates that.
        rec["def6"] = float(obe["pressing_chain_length"].max() if len(obe) > 0 else 0)
        rec["def7"] = int((obe["stop_possession_danger"] == True).sum())

        # DIMENSION 5 — OFF-BALL MOVEMENT INTELLIGENCE (run1–run5 + gng)
        rec["run1"] = int((obr["break_defensive_line"] == True).sum())
        rec["run2"] = int((obr["push_defensive_line"] == True).sum())
        rec["run3"] = int((obr["event_subtype"] == "behind").sum())
        rec["run4"] = int((obr["event_subtype"] == "overlap").sum())
        rec["run5"] = int((obr["third_start"] == "attacking_third").sum())
        rec["att_give_and_go_initiated"] = int((pp["initiate_give_and_go"] == True).sum())

        records.append(rec)

    return pd.DataFrame(records)
113
+
114
+
115
def run_pipeline(data_root="/app/opendata/data/matches", output_path="/app/features.csv"):
    """
    Run the full feature-engineering pipeline.

    Discovers every ``*_dynamic_events.csv`` under *data_root*, computes
    the per-team features for each match, orders the columns, normalizes
    float columns that hold only whole numbers down to int, and writes
    the combined table to *output_path*.

    Parameters
    ----------
    data_root : str
        Root folder searched for dynamic-events CSVs.
    output_path : str
        Destination path for the features CSV.

    Returns
    -------
    pandas.DataFrame
        The features table that was written (one row per team per match).

    Raises
    ------
    ValueError
        If no match file could be processed successfully.
    """
    files = discover_dynamic_events_files(data_root)
    print(f"Discovered {len(files)} dynamic_events.csv files")

    all_dfs = []
    for f in files:
        try:
            match_df = compute_features_for_match(f)
            all_dfs.append(match_df)
            print(f" ✓ {os.path.basename(f)} -> {len(match_df)} rows")
        except Exception as e:
            # Best-effort: one corrupt/malformed file must not abort the run.
            print(f" ✗ {os.path.basename(f)} -> ERROR: {e}")

    if not all_dfs:
        raise ValueError("No valid match files processed.")

    features_df = pd.concat(all_dfs, ignore_index=True)

    # Canonical column order: identifiers first, then the 33 features.
    feature_cols = (
        [f"att{i}" for i in range(1, 21)] +
        [f"def{i}" for i in range(1, 8)] +
        [f"run{i}" for i in range(1, 6)] +
        ["att_give_and_go_initiated"]
    )
    ordered_cols = ["match_id", "team_id"] + feature_cols
    features_df = features_df[[c for c in ordered_cols if c in features_df.columns]]

    # Round floats to 2 dp; downcast to int when every value is a whole
    # number. Fix: the original compared `rounded == rounded.astype(int)`,
    # but astype(int) raises ValueError on NaN/inf — guard with a
    # finiteness + whole-number check before casting.
    for col in features_df.columns:
        if pd.api.types.is_float_dtype(features_df[col]):
            rounded = features_df[col].round(2)
            if np.isfinite(rounded).all() and (rounded % 1 == 0).all():
                features_df[col] = rounded.astype(int)
            else:
                features_df[col] = rounded

    features_df.to_csv(output_path, index=False)
    print(f"\nWrote {len(features_df)} rows × {len(features_df.columns)} columns to {output_path}")
    print(f"Shape: {features_df.shape}")
    return features_df
154
+
155
+
156
if __name__ == "__main__":
    # Script entry point: run the pipeline with its default /app paths.
    run_pipeline()