This repository was archived by the owner on May 8, 2018. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: nfl.py
More file actions
executable file
·59 lines (53 loc) · 1.74 KB
/
nfl.py
File metadata and controls
executable file
·59 lines (53 loc) · 1.74 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
#!/usr/bin/env python3
from bs4 import BeautifulSoup
import requests
import csv
import os
# NFL.com numeric position IDs, as used in the projections URL's
# "position" query parameter (see cbs() below).
positions = {
'QB': 1,
'RB': 2,
'WR': 3,
'TE': 4,
'K': 7,
'DEF': 8
}
# Number of 25-row result pages to fetch per position — presumably sized to
# cover every ranked player at that position; verify against the site.
pages = {
'QB': 4,
'RB': 10,
'WR': 13,
'TE': 7,
'K': 2,
'DEF': 2,
}
# Weeks 1-21: the 17-week 2017 regular season plus postseason weeks.
weeks = range(1, 22)
def cbs(pos, week):
    """Scrape fantasy.nfl.com weekly projections for one position/week.

    Fetches every 25-row results page for *pos* (page count taken from the
    module-level ``pages`` dict), extracts the projection table, and writes
    it to ``nfl_{pos}_week{week}.csv`` in the current working directory.

    Args:
        pos: Position key ('QB', 'RB', 'WR', 'TE', 'K', 'DEF') — must be a
            key of the module-level ``positions`` and ``pages`` dicts.
        week: Week number (1-21) inserted into the query string.

    Returns:
        None. On scrape failure a message is printed and no file is written.
    """
    # {i} is left as a literal placeholder so the per-page offset can be
    # substituted in the fetch loop below.
    url = ('http://fantasy.nfl.com/research/projections?position={pos}'
           '&sort=projectedPts&statCategory=projectedStats'
           '&statSeason=2017&statType=weekProjectedStats'
           '&statWeek={week}&offset={i}').format(
               pos=positions[pos], week=week, i='{i}')

    def clean(cell):
        # Strip trailing whitespace and non-breaking spaces from a cell.
        return cell.text.rstrip().replace(u'\xa0', u'')

    header = None
    data = []
    try:
        # One fetch per page; the header is parsed from the first page, so
        # the first page is no longer downloaded twice as it was before.
        for page in range(pages[pos]):
            offset = 25 * page + 1
            # timeout prevents a stalled server from hanging the whole run.
            contents = requests.get(url.format(i=offset), timeout=30).content
            soup = BeautifulSoup(contents, "html.parser")
            table = soup.find('table')
            if header is None:
                # Second header row holds the column names.
                header_row = table.find('thead').find_all('tr')[1]
                header = [clean(th) for th in header_row.find_all('th')]
            data += [[clean(td) for td in row.find_all('td')]
                     for row in table.find('tbody').find_all('tr')]
    except (IndexError, AttributeError):
        # IndexError: missing second <thead> row. AttributeError: no <table>
        # / <thead> / <tbody> in the response (e.g. an error page) — the old
        # code crashed on this instead of reporting it.
        print("Failed: " + pos + " " + str(week))
        return

    filename = 'nfl_{pos}_week{week}.csv'.format(pos=pos, week=week)
    # newline='' is required by the csv module (avoids blank rows on
    # Windows); utf-8 makes player names with non-ASCII characters writable,
    # eliminating the UnicodeEncodeError the old code had to catch.
    with open(filename, 'w', newline='', encoding='utf-8') as f:
        print('writing: ', filename)
        writer = csv.writer(f)
        writer.writerow(header)
        writer.writerows(data)
# Scrape every position for every week into the local "nfl" directory.
os.makedirs('nfl', exist_ok=True)  # old code crashed if the dir was missing
os.chdir('nfl')
try:
    # Plain loops: the old side-effect list comprehension built and threw
    # away a list of Nones.
    for pos in positions:
        for week in weeks:
            cbs(pos, week)
finally:
    # Always restore the working directory, even if a scrape raises.
    os.chdir('..')