I have this code to scrape player information (Name, Position, Number) by pasting in a URL from "any" ESPN roster page. I say "any" because any page that has at least one player without a number/jersey value errors out. Is there a way to fix this error?
As an example of each, the Philadelphia Eagles page converts correctly (https://www.espn.com/nfl/team/roster/_/name/phi), but the Detroit Lions roster does not (https://www.espn.com/nfl/team/roster/_/name/det).
# -*- coding: utf-8 -*-
import os, json, re
import requests
team = ''
def SavePlayerData(DATA):
    """Write the collected roster lines to '<team>.txt' in the working
    directory, stripping filename-illegal characters from the team name."""
    global team
    # Remove every character Windows forbids in filenames.
    for bad_char in '\\/:*?"<>|':
        team = team.replace(bad_char, '')
    with open('%s.txt' % (team), 'w') as out_file:
        out_file.writelines(DATA)
def GetTeamData(link):
    """Fetch an ESPN roster page, parse the JSON payload embedded in the
    page's script tag, and save one line per player (plus the head coach)
    via SavePlayerData.

    link -- full URL of an ESPN '/team/roster/' page.
    """
    global opener, headers, team, short
    response = opener.get(link, headers=headers).text.encode('utf-8')
    # The roster data is embedded as a JS assignment; capture the JSON blob.
    content = re.search(r"window\['__espnfitt__'\]\=(.+?)\;</script>", response).group(1)
    jsonobj = json.loads(content)
    roster = jsonobj['page']['content']['roster']
    team = roster['team']['displayName']
    coach = roster['coach']['description']
    TEAM = []
    for group in roster['groups']:
        for player in group['athletes']:
            n = player['name']
            p = player['position']
            # BUG FIX: some players (e.g. on the Lions roster) have no
            # 'jersey' key; default to '' instead of raising KeyError.
            j = player.get('jersey', '')
            DATA = '%s%s\t%s %s %s (%s)\t[%s]\n' % (short, j, team, p, n, j, n)
            TEAM.append(DATA)
    DATA = '%shc\t%s %s %s\t[%s]\n' % (short, team, 'head coach', coach, coach)
    TEAM.append(DATA)
    SavePlayerData(TEAM)
# Shared HTTP session and request headers used by GetTeamData.
opener = requests.Session()
# Desktop Chrome user-agent so ESPN serves the full page markup.
headers = {'host': 'www.espn.com',
'user-agent': 'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36'}
if __name__ == "__main__":
    # Prompt for the roster URL and the one-letter team prefix ('d' default).
    teamURL = raw_input(' >> Enter the Team Roster URL :: ').strip()
    short = raw_input(' >> Enter the Letter for this Team :: ').strip().lower() or 'd'
    try:
        if not teamURL:
            raise Exception
        # Rewrite a plain team URL into its roster-page form.
        if '/roster/' not in teamURL:
            teamURL = teamURL.replace('/team/_/', '/team/roster/_/')
        print ('\n >> Collecting Data from <%s>\n' % (teamURL))
        GetTeamData(teamURL)
        print (' >> Link Scraped & Data Saved to File')
    except Exception as e:
        print ('\n >> Failed to Get Required Data, Re-Check your Input URL.')
You could use a try/except around the lookup, or simply add a conditional check for whether the 'jersey' key is present in the player data:
import os, json, re
import requests
team = ''
def SavePlayerData(DATA):
    """Persist the roster lines to a text file named after the current team,
    first removing characters that are illegal in filenames."""
    global team
    # Characters Windows does not allow in filenames.
    for ch in '\\/:*?"<>|':
        team = team.replace(ch, '')
    outfilename = '%s.txt' % (team)
    with open(outfilename, 'w') as fh:
        fh.write(''.join(DATA))
def GetTeamData(link):
    """Download an ESPN roster page, extract the embedded JSON payload, and
    hand one formatted line per player (plus the head coach) to SavePlayerData.

    link -- full URL of an ESPN '/team/roster/' page.
    """
    global opener, headers, team, short
    response = opener.get(link, headers=headers).text
    # The roster lives in a JS assignment inside a <script> tag.
    match = re.search(r"window\['__espnfitt__'\]\=(.+?)\;</script>", response)
    roster = json.loads(match.group(1))['page']['content']['roster']
    team = roster['team']['displayName']
    coach = roster['coach']['description']
    lines = []
    for group in roster['groups']:
        for player in group['athletes']:
            name = player['name']
            pos = player['position']
            # Players without a listed number fall back to an empty string
            # (equivalent to the original 'jersey' in player check).
            number = player.get('jersey', '')
            lines.append('%s%s\t%s %s %s (%s)\t[%s]\n'
                         % (short, number, team, pos, name, number, name))
    lines.append('%shc\t%s %s %s\t[%s]\n' % (short, team, 'head coach', coach, coach))
    SavePlayerData(lines)
# Shared HTTP session and request headers used by GetTeamData.
opener = requests.Session()
# Desktop Chrome user-agent so ESPN serves the full page markup.
headers = {'host': 'www.espn.com',
'user-agent': 'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36'}
if __name__ == "__main__":
    # Prompt for the roster URL and the one-letter team prefix ('d' default).
    teamURL = input(' >> Enter the Team Roster URL :: ').strip()
    short = input(' >> Enter the Letter for this Team :: ').strip().lower() or 'd'
    try:
        if not teamURL:
            raise Exception
        # Rewrite a plain team URL into its roster-page form.
        if '/roster/' not in teamURL:
            teamURL = teamURL.replace('/team/_/', '/team/roster/_/')
        print ('\n >> Collecting Data from <%s>\n' % (teamURL))
        GetTeamData(teamURL)
        print (' >> Link Scraped & Data Saved to File')
    except Exception as e:
        print ('\n >> Failed to Get Required Data, Re-Check your Input URL.')