Example #1
import re

import requests
from bs4 import BeautifulSoup as BS


def scrape():
    hold = [['playername', 'points']]  # header row for the output
    for page in build_fp_pages():  # each page is a (url, position) pair
        r = requests.get(page[0])
        soup = BS(r.text, 'html.parser')
        for row in soup.find_all('tr', class_=re.compile('mpb-player-')):
            try:
                player_row = row.find_all('td')
                player_name = str(player_row[0].text)
                if player_name in RENAMES:
                    dk_name = RENAMES[player_name]
                    print('Renaming {} to {} to match DraftKings'.format(
                        player_name, dk_name))
                    player_name = dk_name

                hold.append([
                    player_name,
                    calculate_ppr(
                        page[1],
                        convert_fanpros_data(page[1],
                                             [x.text for x in player_row]))
                ])
            except Exception as e:
                print('Error scraping FanPros data: {}'.format(e))
    return hold
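This example also assumes several module-level names that the snippet does not define: RENAMES, build_fp_pages, calculate_ppr, and convert_fanpros_data. A minimal sketch of the first two, purely as illustrative stand-ins for whatever the real project defines:

# Hypothetical stand-ins; the real module defines its own versions.
RENAMES = {
    # maps a FanPros spelling to the DraftKings spelling (made-up entry)
    'Pat Mahomes': 'Patrick Mahomes',
}

def build_fp_pages():
    # Assumed to return (url, position) pairs, one per projections page;
    # this URL pattern is a guess, not the project's real constant.
    base = 'https://www.fantasypros.com/nfl/projections/{}.php'
    return [(base.format(p.lower()), p) for p in ('QB', 'RB', 'WR', 'TE')]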
Example #2
import requests
from bs4 import BeautifulSoup as BS


def scrape_nfl_fan(limit):
    hold = []
    # Offensive players ('O'): results come 25 per page and the URL takes a
    # 1-based row offset, so page i starts at row (i * 25) + 1.
    for i in range(limit // 25):
        param = (i * 25) + 1
        r = requests.get(NFL_FAN_URL.format(param, 'O'))
        soup = BS(r.text, 'html.parser')
        player_rows = [tds for tds in
                       (tr.find_all('td') for tr in soup.find_all('tr'))
                       if tds]
        for row in player_rows:
            # NFL.com shows missing stats as '-'; normalize those to '0'.
            player_stats = ['0' if x.text == '-' else x.text for x in row]
            positions = [p for p in NFL_FAN_POSITIONS if p in player_stats[0]]
            if positions:
                pos = positions[0]
                name = player_stats[0].split(pos)[0].rstrip()
                projected_points = calculate_ppr(
                    pos, convert_nfl_fantasy_data(pos, player_stats))
                if projected_points != 0:
                    hold.append([name, projected_points])
    # Team defenses fit on two pages, requested at offsets 0 and 25.
    for i in (0, 25):
        r = requests.get(NFL_FAN_URL.format(i, '8'))
        soup = BS(r.text, 'html.parser')
        player_rows = [tds for tds in
                       (tr.find_all('td') for tr in soup.find_all('tr'))
                       if tds]
        for row in player_rows:
            player_stats = ['0' if x.text == '-' else x.text for x in row]
            if 'DEF' in player_stats[0]:
                name = player_stats[0].split('DEF')[0].rstrip()
                projected_points = calculate_ppr(
                    'DEF', convert_nfl_fantasy_data('DEF', player_stats))
                hold.append([name, projected_points])
    return hold
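The shape of NFL_FAN_URL is not shown; the code only tells us it takes two format arguments, a row offset and a position-category code ('O' for offense, '8' apparently for team defenses). A hedged usage sketch with an invented URL pattern:

# Illustrative only: the real NFL_FAN_URL constant is not in the snippet.
NFL_FAN_URL = ('https://fantasy.nfl.com/research/projections'
               '?offset={}&position={}')

projections = scrape_nfl_fan(400)  # 400 // 25 = 16 pages of offensive players
for name, points in projections[:5]:
    print(name, points)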
Example #4
import re

import requests
from bs4 import BeautifulSoup as BS


def scrape():
    hold = [['playername', 'points']]  # header row for the output
    for page in build_fp_pages():  # each page is a (url, position) pair
        r = requests.get(page[0])
        soup = BS(r.text, 'html.parser')
        for row in soup.find_all('tr', class_=re.compile('mpb-player-')):
            try:
                player_row = row.find_all('td')
                hold.append([
                    str(player_row[0].text),
                    calculate_ppr(
                        page[1],
                        convert_fanpros_data(page[1],
                                             [x.text for x in player_row]))
                ])
            except Exception as e:
                print('Error scraping FanPros data: {}'.format(e))
    return hold
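Given the ['playername', 'points'] header row, the returned list is presumably written out as CSV. A small usage sketch, assuming scrape() returns hold as above; the filename is arbitrary:

import csv

rows = scrape()
with open('fanpros_projections.csv', 'w', newline='') as f:
    csv.writer(f).writerows(rows)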