def main(): p = MyFantasyLeagueNFLParser() matched_players = [] unmatched_players = [] # MyFantasyLeagueNFLParser.players returns a list of dictionaries # Easier to do name matching if transform to dictionary where full_name is key, player is value fname = 'myfantasyleague_projections/players.xml' positions = ['QB', 'RB', 'WR', 'TE'] players_match_from = { player['full_name']: player for player in p.players(positions=positions, fname=fname) } # now get players from other sites for site, command in commands.items(): result = run_command(command) try: players_to_match = json.loads(result) for player in players_to_match: try: full_name, first_last = NameMatcher.fix_name( player['full_name']) player['full_name'] = full_name player['first_last'] = first_last player = NameMatcher.match_player( to_match=player, match_from=players_match_from, site_id_key='espn_id') if player.get('espn_id') is not None: matched_players.append(player) else: unmatched_players.append(player) except Exception as ie: logging.exception('%s: threw inner exception' % player['full_name']) except Exception as e: logging.exception('%s: threw outer exception') print json.dumps(matched_players, indent=4, sort_keys=True)
def _parse_row(self, sheet, rowidx, column_map):
    """Parse one worksheet row into a player dictionary.

    :param sheet: xlrd worksheet object to read cells from
    :param rowidx: integer index of the row to parse
    :param column_map: dict mapping column header name -> column index
    :return: dict of fixed header name -> stringified cell value
    """
    cells = []
    # loop through list of columns you want to scrape
    for column in self.wanted_cols:
        colidx = column_map.get(column, None)
        if colidx is not None:
            cells.append(str(sheet.cell(rowidx, colidx).value))
        else:
            logging.error("could not find column index for %s" % column)
    fixed_column_names = self._fix_headers(self.wanted_cols)
    player = dict(zip(fixed_column_names, cells))
    # BUG FIX: every other call site in this project unpacks
    # NameMatcher.fix_name as (full_name, first_last); the original
    # assignment here was swapped, storing each value under the wrong key.
    full_name, first_last = NameMatcher.fix_name(player["full_name"])
    player["first_last"] = first_last
    player["full_name"] = full_name
    logging.debug("player is %s" % player)
    return player
def _parse_row(self, sheet, rowidx, column_map):
    '''Parse one worksheet row into a player dictionary.

    :param sheet: xlrd worksheet object to read cells from
    :param rowidx: integer index of the row to parse
    :param column_map: dict mapping column header name -> column index
    :return: dict of fixed header name -> stringified cell value
    '''
    cells = []
    # loop through list of columns you want to scrape
    for column in self.wanted_cols:
        colidx = column_map.get(column, None)
        if colidx is not None:
            cells.append(str(sheet.cell(rowidx, colidx).value))
        else:
            logging.error('could not find column index for %s' % column)
    fixed_column_names = self._fix_headers(self.wanted_cols)
    player = dict(zip(fixed_column_names, cells))
    # BUG FIX: every other call site in this project unpacks
    # NameMatcher.fix_name as (full_name, first_last); the original
    # assignment here was swapped, storing each value under the wrong key.
    full_name, first_last = NameMatcher.fix_name(player['full_name'])
    player['first_last'] = first_last
    player['full_name'] = full_name
    logging.debug('player is %s' % player)
    return player
def main(): p = MyFantasyLeagueNFLParser() matched_players = [] unmatched_players = [] # MyFantasyLeagueNFLParser.players returns a list of dictionaries # Easier to do name matching if transform to dictionary where full_name is key, player is value fname = 'myfantasyleague_projections/players.xml' positions = ['QB', 'RB', 'WR', 'TE'] players_match_from = {player['full_name']: player for player in p.players(positions=positions, fname=fname)} # now get players from other sites for site, command in commands.items(): result = run_command(command) try: players_to_match = json.loads(result) for player in players_to_match: try: full_name, first_last = NameMatcher.fix_name(player['full_name']) player['full_name'] = full_name player['first_last'] = first_last player = NameMatcher.match_player(to_match=player, match_from=players_match_from, site_id_key='espn_id') if player.get('espn_id') is not None: matched_players.append(player) else: unmatched_players.append(player) except Exception as ie: logging.exception('%s: threw inner exception' % player['full_name']) except Exception as e: logging.exception('%s: threw outer exception') print json.dumps(matched_players, indent=4, sort_keys=True)
def _parse_projections_rows(self, reader):
    '''Build a list of cleaned player dicts from projection rows.

    :param reader: iterable of row dicts (e.g. csv.DictReader)
    :return: list of player dicts with normalized position and names
    '''
    parsed = []
    for raw_row in reader:
        # drop empty keys/values from the raw row
        row = {key: value for key, value in raw_row.items() if key and value}
        # fantasypros lists position as RB1, QB2, so need to strip numbers
        row['position'] = ''.join(
            ch for ch in row['position'] if not ch.isdigit())
        # standardize names for lookup
        full_name, first_last = NameMatcher.fix_name(row['full_name'])
        row['full_name'] = full_name
        row['first_last'] = first_last
        parsed.append(row)
    return parsed
mfl = MyFantasyLeagueNFLParser() # MyFantasyLeagueNFLParser.players returns a list of dictionaries # Easier to do name matching if transform to dictionary where full_name is key, player is value fname = 'myfantasyleague_projections/players.xml' positions = ['QB', 'RB', 'WR', 'TE'] players_match_from = { p['full_name']: p for p in mfl.players(positions=positions, fname=fname) } save_json(mfl_json, players_match_from) for i in xrange(len(players)): try: players[i] = NameMatcher.match_player( to_match=players[i], match_from=players_match_from, site_id_key='espn_id') except: pass print json.dumps(players, indent=4, sort_keys=True) jsonfn = args.get('save_json', None) if jsonfn: save_json(jsonfn, players) csvfn = args.get('save_csv', None) if csvfn: save_csv(csvfn, players)
# Load the MFL lookup from a cached JSON file when present, otherwise
# rebuild it from the players XML and cache it for next time.
import logging  # no-op if already imported at top of file

if os.path.exists(mfl_json):
    with open(mfl_json) as data_file:
        players_match_from = json.load(data_file)
else:
    mfl = MyFantasyLeagueNFLParser()
    # MyFantasyLeagueNFLParser.players returns a list of dictionaries.
    # Name matching is easier against a dict keyed on full_name.
    fname = 'myfantasyleague_projections/players.xml'
    positions = ['QB', 'RB', 'WR', 'TE']
    players_match_from = {
        player['full_name']: player
        for player in mfl.players(positions=positions, fname=fname)
    }
    save_json(mfl_json, players_match_from)

for i in xrange(len(players)):
    try:
        players[i] = NameMatcher.match_player(
            to_match=players[i],
            match_from=players_match_from,
            site_id_key='espn_id')
    except Exception:
        # BUG FIX: bare 'except: pass' silently swallowed every error
        # (including KeyboardInterrupt/SystemExit); keep the best-effort
        # behavior — leave the player unmatched — but log the failure.
        logging.exception('could not match player: %s', players[i])

# dump to stdout
pprint.pprint(players, indent=4)

# save to file if parameter set
jsonfn = args.get('save_json', None)
if jsonfn:
    save_json(jsonfn, players)
csvfn = args.get('save_csv', None)
if csvfn:
    save_csv(csvfn, players)