Upload uefa_euro2020_processing.py
This file contains the code for all data processing work to create the final dataset.
- uefa_euro2020_processing.py +280 -0
uefa_euro2020_processing.py
ADDED
|
@@ -0,0 +1,280 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
"""UEFA_Euro2020_Processing.ipynb

Automatically generated by Colaboratory.

Original file is located at
    https://colab.research.google.com/drive/14eW1QiGXrszsqNFVKnT7WjDyDmxVuIve

Data-processing pipeline that combines the UEFA Euro 2020 CSV exports
(match information, events, line-ups, team/player statistics and pre-match
info) into one nested dataset, written out as CSV/JSON/Parquet at the end.
"""

import pandas as pd
import numpy as np
from functools import reduce

# Load the raw CSV exports.
# NOTE(review): '/content/...' paths are Colab-specific — adjust when running elsewhere.
match_events = pd.read_csv('/content/Match events.csv')
match_information = pd.read_csv('/content/Match information.csv')
match_line_up = pd.read_csv('/content/Match line-ups.csv')
match_player_stats = pd.read_csv('/content/player_stats.csv')
match_team_stats = pd.read_csv('/content/Match team statistics.csv')
pre_match_info = pd.read_csv('/content/Pre-match information.csv')
|
| 20 |
+
|
| 21 |
+
# Impute the one missing referee name (the referee from CONMEBOL, Rapallini).
match_information['RefereeWebName'] = match_information['RefereeWebName'].fillna("Rapallini")

# Build the referee-enriched match table on a copy of the match information.
Euro2020_df = match_information.copy()

# Stage label: match days 1-3 are the group stage, later days are knockouts.
stage_labels = np.where(Euro2020_df['MatchDay'] <= 3, 'Group Stage', 'Knockout Stage')
Euro2020_df.insert(loc=5, column='Stage', value=stage_labels)

# Referee workload statistics.
# Running count of matches a referee has handled up to and including this row.
running_count = Euro2020_df.groupby('RefereeWebName').cumcount().add(1).astype(int)
Euro2020_df.insert(loc=17, column='NumberofMatchesRefereedPostMatch', value=running_count)

# Total matches per referee across the whole tournament.
total_count = Euro2020_df.groupby('RefereeWebName')['RefereeWebName'].transform('count').astype(int)
Euro2020_df.insert(loc=18, column='TotalNumberofMatchesRefereed', value=total_count)

# Per-referee match counts split by tournament stage.
group_stage_count = Euro2020_df.groupby('RefereeWebName')['Stage'].transform(lambda s: s.eq('Group Stage').sum())
Euro2020_df.insert(loc=19, column='NumberofMatchesRefereedinGroupStage', value=group_stage_count)

knockout_count = Euro2020_df.groupby('RefereeWebName')['Stage'].transform(lambda s: s.eq('Knockout Stage').sum())
Euro2020_df.insert(loc=20, column='NumberofMatchesRefereedinKnockoutStage', value=knockout_count)
|
| 43 |
+
|
| 44 |
+
# create nested structures for match events

# Keep MatchID (column 0) plus the event-detail columns 3-12 for nesting.
nested = match_events.iloc[:,[0] + list(range(3, 13))]

# Columns 0-2 (MatchID plus two match-level columns), one row per match.
separate = match_events.iloc[:,0:3].drop_duplicates().set_index("MatchID")

# Group by 'MatchID' and 'Phase': one list of event records per (match, phase),
# then pivot so each match is a row with one column per phase.
nested_structure = (nested.groupby(['MatchID', 'Phase'])
.apply(lambda x: x.drop('MatchID', axis=1).to_dict('records'))
.reset_index(name='Events')
.pivot(index='MatchID', columns="Phase", values='Events'))

# Rename phases (numeric phase codes -> readable, sortable labels)
phase_names = {
1: '1-First Half',
2: '2-Second Half',
3: '3-Extra Time First Half',
4: '4-Extra Time Second Half',
5: '5-Penalty Shootout'
}
nested_structure.rename(columns=phase_names, inplace=True)

# Combine the phase columns into one 'MatchEvent' column.
# NOTE(review): phases a match never reached are NaN after the pivot, and that
# NaN is carried into the dict — confirm downstream consumers expect this.
nested_structure['MatchEvent'] = nested_structure.apply(lambda row: {phase: events for phase, events in row.items()}, axis=1)
# Drop the individual phase columns
nested_structure.drop(columns=phase_names.values(), inplace=True)
# Join the separate columns with the nested structure
nested_match_events = separate.join(nested_structure).reset_index()
# Drop the two match-level columns (positions 1 and 2): only MatchID and MatchEvent remain.
nested_match_events = nested_match_events.drop(nested_match_events.columns[[1, 2]],axis=1)
|
| 75 |
+
|
| 76 |
+
# Distinguish home/away participants in the line-up data. Only the pre-match
# info table carries the home/away flags, so merge its relevant columns
# (positions 0, 3, 4, 7) onto the line-ups before further processing.
line_up_merged = match_line_up.merge(pre_match_info.iloc[:, [0, 3, 4, 7]],
                                     on=['MatchID', 'ID'], how='left')

# Column lists for staff records and player records in the nested line-ups.
staff_vars = ['Country', 'ID', 'OfficialName', 'OfficialSurname', 'ShortName', 'Role']
_non_player_cols = {'MatchID', 'HomeTeamName', 'AwayTeamName',
                    'IsPitch', 'IsBench', 'IsStaff', 'IsHomeTeam', 'IsAwayTeam'}
player_vars = [col for col in line_up_merged.columns if col not in _non_player_cols]
|
| 87 |
+
|
| 88 |
+
# Function to create the nested line-up for one side of a match
def create_team_structure(x, is_home_team):
    """Return the nested line-up (starting XI, bench, staff) for one team.

    x is the line-up rows of a single match; is_home_team selects the side
    (True for home, False for away).
    """
    squad_df = x[x['IsHomeTeam'] == is_home_team].copy()

    on_pitch = squad_df.loc[squad_df['IsPitch'], player_vars]
    on_bench = squad_df.loc[squad_df['IsBench'], player_vars]
    team_staff = squad_df.loc[squad_df['IsStaff'], staff_vars]

    return {'Starting11': on_pitch.to_dict('records'),
            'Benched Players': on_bench.to_dict('records'),
            'Staff': team_staff.to_dict('records')}
|
| 109 |
+
|
| 110 |
+
# Build the nested line-ups per match: one dict with the home and away sides.
# (Lambda parameter renamed so it no longer shadows the module-level
# line_up_merged DataFrame.)
nested_line_up = line_up_merged.groupby('MatchID').apply(lambda match_rows: {
    'HomeTeamLineUp': create_team_structure(match_rows, True),
    'AwayTeamLineUp': create_team_structure(match_rows, False)
}).reset_index(name='TeamLineUps')
|
| 115 |
+
|
| 116 |
+
# create nested structures for team stats

# First retrieve all distinct stat names for teams and players, and explore
# the difference between the two name sets so that classifying both is easier.
team_unique = list(match_team_stats['StatsName'].unique())
print(team_unique)

player_unique = list(match_player_stats['StatsName'].unique())
print(player_unique)

set1 = set(team_unique)
set2 = set(player_unique)

# Stats that exist only for teams / only for players.
difference1 = set1 - set2
difference2 = set2 - set1

# FIX: build the difference lists in first-appearance order instead of
# list(set-difference). Set iteration order is not deterministic across
# interpreter runs (string hash randomization), and diff_list2 is indexed
# positionally further below — the old code silently shuffled categories.
diff_list1 = [name for name in team_unique if name in difference1]
diff_list2 = [name for name in player_unique if name in difference2]

print(diff_list1)
print(diff_list2)
|
| 141 |
+
|
| 142 |
+
# classify statistics
# NOTE(review): these positional slices depend on the exact first-appearance
# order of StatsName values in the team-statistics CSV (team_unique above).
# If the source file's row order ever changes, the category contents silently
# change too — verify the indices against the printed team_unique list.
attacking = [team_unique[0]] + team_unique[2:8] + [team_unique[22]] + team_unique[24:56] + team_unique[58:74] + team_unique[93:106] + [team_unique[120]] + [team_unique[178]] + team_unique[182:184] + team_unique[186:189] + [team_unique[192]]
possession = [team_unique[1]] + team_unique[16:18] + [team_unique[56]] + team_unique[74:93] + team_unique[112:119]
violation_foul_discipline = [team_unique[8]] + team_unique[13:16] + team_unique[147:156]
goalkeeping = [team_unique[9]] + team_unique[125:146] + team_unique[189:191]
defending = team_unique[10:13] + [team_unique[21]] + [team_unique[23]] + team_unique[106:112] + [team_unique[119]] + team_unique[121:125]
coverage_speed = team_unique[18:21] + team_unique[156:169] + [team_unique[177]] + team_unique[179:181] + team_unique[184:186] + [team_unique[191]]
time_stats = [team_unique[57]] + [team_unique[146]] + team_unique[169:177]
matches_played = [team_unique[181]]
|
| 151 |
+
|
| 152 |
+
# Sanity check: every team stat name should fall into one of the categories.
categorised = (attacking + possession + violation_foul_discipline + goalkeeping
               + defending + coverage_speed + time_stats + matches_played)
set3 = set(categorised)
set4 = set(team_unique)
difff = list(set4 - set3)
difff  # expected to be empty (notebook display expression; no effect in a script)
|
| 157 |
+
|
| 158 |
+
# Extend two categories with the player-only stat names found above.
player_time_stats = time_stats + [diff_list2[i] for i in (0, 2, 4)]
player_coverage_speed = coverage_speed + [diff_list2[i] for i in (1, 3)]
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
# Map a stats name to its category label (shared by team and player stats).
def assign_category(name):
    """Return the category label for *name*, or None if it is uncategorised."""
    # Ordered rule table: first matching category wins, same order as before.
    rules = [
        (attacking, 'attacking'),
        (possession, 'possession'),
        (violation_foul_discipline, 'violation&foul&discipline'),
        (goalkeeping, 'goalkeeping'),
        (defending, 'defending'),
        (coverage_speed + player_coverage_speed, 'coverage&speed'),
        (time_stats + player_time_stats, 'time stats'),
        (matches_played, 'matches played'),
    ]
    for members, label in rules:
        if name in members:
            return label
    return None
|
| 182 |
+
|
| 183 |
+
# Label every stat row with its category, for both the team-level and the
# player-level stat tables.
# (assign_category is passed directly — the previous `lambda name:
# assign_category(name)` wrapper was redundant.)
match_team_stats['StatsCategory'] = match_team_stats['StatsName'].apply(assign_category)
match_player_stats['StatsCategory'] = match_player_stats['StatsName'].apply(assign_category)

# Columns kept for each stat record inside the nested team-stats structure.
stats_details_cols = ['TeamID', 'TeamName', 'StatsID', 'StatsName', 'Value', 'Rank']
|
| 189 |
+
|
| 190 |
+
# Build the nested team-stats structure for a single match's rows.
def nested_stats(group):
    """Nest one match's team stats by category, split into home and away."""

    def stats_by_category(team_stats):
        # One list of stat records per category.
        by_cat = team_stats.groupby('StatsCategory')[stats_details_cols]
        return by_cat.apply(lambda rows: rows.to_dict('records')).to_dict()

    return {
        'HomeTeamStats': stats_by_category(group[group['IsHomeTeam']]),
        'AwayTeamStats': stats_by_category(group[group['IsAwayTeam']])
    }
|
| 207 |
+
|
| 208 |
+
# Group by 'MatchID' and apply the nested stats function
# (result: one row per match; 'TeamStats' holds the nested home/away dict).
nested_team_stats = match_team_stats.groupby('MatchID').apply(nested_stats).reset_index(name='TeamStats')
|
| 210 |
+
|
| 211 |
+
# Columns kept for each record inside the nested player-stats structure.
player_stats_details = ['PlayerID', 'PlayerName', 'PlayerSurname', 'IsGoalkeeper', 'PlayedTime', 'StatsID', 'StatsName', 'Value', 'Rank']

# Align the pre-match info's player key with the stats table, then merge its
# home/away flag columns (positions 0, 3, 4, 7) onto the player stats.
pre_match = pre_match_info.rename(columns={'ID': 'PlayerID'})
player_stats_merged = match_player_stats.merge(pre_match.iloc[:, [0, 3, 4, 7]],
                                               on=['MatchID', 'PlayerID'], how='left')
|
| 218 |
+
|
| 219 |
+
# Build the nested player-stats structure for a single match's rows.
# Renamed from 'nested_stats' so it no longer silently shadows the team-stats
# function of the same name defined earlier in the file.
def build_player_nested_stats(group):
    """Nest one match's player stats by category, split into home and away."""
    home_stats = group[group['IsHomeTeam']]
    away_stats = group[group['IsAwayTeam']]

    def stats_by_category(player_stats):
        # One list of player stat records per category.
        return (player_stats.groupby('StatsCategory')[player_stats_details]
                .apply(lambda x: x.to_dict('records')).to_dict())

    return {
        'HomeTeamPlayerStats': stats_by_category(home_stats),
        'AwayTeamPlayerStats': stats_by_category(away_stats)
    }

# Group by 'MatchID' and build the nested player stats per match.
nested_player_stats = player_stats_merged.groupby('MatchID').apply(build_player_nested_stats).reset_index(name='PlayerStats')
|
| 239 |
+
|
| 240 |
+
# Pre-match info restricted to players (staff rows excluded).
players_info = pre_match.loc[pre_match['IsStaff'] == False]

# Columns included for each player in the nested pre-match structure.
player_info_vars = ['PlayerID', 'OfficialName', 'OfficialSurname', 'JerseyName', 'ShortName', 'GoalScored', 'CleanSheet', 'SuspendedIfBooked', 'Role']
|
| 245 |
+
|
| 246 |
+
# Build the list of pre-match player records for one side of a match.
def create_player_structure(df, is_home_team):
    """Return pre-match info records for the home (True) or away (False) squad."""
    side_players = df[df['IsHomeTeam'] == is_home_team][player_info_vars]
    return side_players.to_dict('records')
|
| 252 |
+
|
| 253 |
+
# Construct the nested pre-match player info, one row per match.
# (Uses a dedicated accumulator name instead of reusing 'nested_structure',
# which earlier in the script refers to the match-events DataFrame — that
# reuse made the script confusing to follow.)
pre_match_records = {
    'MatchID': [],
    'PlayerPreMatchInfo': []
}

for match_id, group in players_info.groupby('MatchID'):
    pre_match_records['MatchID'].append(match_id)
    pre_match_records['PlayerPreMatchInfo'].append({
        'HomeTeamPlayerInfo': create_player_structure(group, True),
        'AwayTeamPlayerInfo': create_player_structure(group, False)
    })

# Convert the nested structure to a DataFrame
nested_player_pre_match_info = pd.DataFrame(pre_match_records)
|
| 268 |
+
|
| 269 |
+
# Merge all nested sub-datasets into the final dataset, joined on MatchID.
all_dfs = [Euro2020_df, nested_match_events, nested_line_up, nested_team_stats,
           nested_player_stats, nested_player_pre_match_info]

# Outer-join so a match missing from any one sub-dataset is still kept.
Euro2020_df_final = reduce(lambda left, right: pd.merge(left, right, on='MatchID', how='outer'), all_dfs)
# (The bare 'Euro2020_df_final' display expression from the notebook was
# removed — it is a no-op in a plain script.)

# Persist the final dataset in three formats.
Euro2020_df_final.to_csv('Euro2020.csv', index=False)
Euro2020_df_final.to_json('Euro2020.json', orient='records', lines=False)
Euro2020_df_final.to_parquet('Euro2020.parquet', index=False)
|