def get(self, year):
    """Rebuild DistrictTeam models for a season from that season's EventTeams.

    For every district event in ``year``, collect the district enum of each
    participating team, then assign each team to the district it appeared in
    most often and create/update the corresponding DistrictTeam entities.
    """
    year = int(year)
    logging.info("Fetching events in {}".format(year))
    # Only real district events: enum must be set and not NO_DISTRICT.
    year_events = Event.query(
        year == Event.year,
        Event.event_district_enum != DistrictType.NO_DISTRICT,
        Event.event_district_enum != None).fetch()

    # team key -> list of district enums, one entry per district event attended
    team_districts = defaultdict(list)
    for event in year_events:
        logging.info("Fetching EventTeams for {}".format(event.key_name))
        for event_team in EventTeam.query(EventTeam.event == event.key).fetch():
            team_districts[event_team.team.id()].append(event.event_district_enum)

    new_district_teams = []
    for team_key, districts in team_districts.iteritems():
        # A team may attend events of several districts; pick the most common one.
        most_frequent_district = max(set(districts), key=districts.count)
        logging.info("Assuming team {} belongs to {}".format(
            team_key, DistrictType.type_names[most_frequent_district]))
        dt_key = DistrictTeam.renderKeyName(year, most_frequent_district, team_key)
        new_district_teams.append(
            DistrictTeam(
                id=dt_key,
                year=year,
                team=ndb.Key(Team, team_key),
                district=most_frequent_district))

    logging.info("Finishing updating old district teams from event teams")
    DistrictTeamManipulator.createOrUpdate(new_district_teams)
    self.response.out.write(
        "Finished creating district teams for {}".format(year))
def parse(self, response):
    """Parse team info from FMSAPI.

    Args:
        response: decoded JSON dict from the FMSAPI team listing endpoint.

    Returns:
        A tuple of: list of (Team, DistrictTeam, Robot) model tuples, and a
        Boolean indicating if there are more pages to be fetched.
    """
    # Get team json
    # don't need to null check, if error, HTTP code != 200, so we wont' get here
    current_page = response['pageCurrent']
    total_pages = response['pageTotal']
    teams = response['teams']

    ret_models = []
    for teamData in teams:
        # Use .get() so a missing 'website' key can't raise KeyError; the
        # original else-branch already treated the key as optional.
        raw_website = teamData.get('website')
        # Fix issue where FIRST's API returns dummy website for all teams
        if raw_website is not None and 'www.firstinspires.org' in raw_website:
            website = None
        else:
            website = WebsiteHelper.format_url(raw_website)

        # homeCMP is optional; fetch once and normalize to lowercase when present
        home_cmp = teamData.get('homeCMP')
        team = Team(
            id="frc{}".format(teamData['teamNumber']),
            team_number=teamData['teamNumber'],
            name=teamData['nameFull'],
            nickname=teamData['nameShort'],
            school_name=teamData.get('schoolName'),
            home_cmp=home_cmp.lower() if home_cmp else None,
            city=teamData['city'],
            state_prov=teamData['stateProv'],
            country=teamData['country'],
            website=website,
            rookie_year=teamData['rookieYear']
        )

        districtTeam = None
        if teamData['districtCode']:
            districtAbbrev = DistrictType.abbrevs[teamData['districtCode'].lower()]
            districtTeam = DistrictTeam(
                id=DistrictTeam.renderKeyName(self.year, districtAbbrev, team.key_name),
                team=ndb.Key(Team, team.key_name),
                year=self.year,
                district=districtAbbrev,
                district_key=ndb.Key(District, District.renderKeyName(self.year, teamData['districtCode'].lower())),
            )

        robot = None
        if teamData['robotName']:
            robot = Robot(
                id=Robot.renderKeyName(team.key_name, self.year),
                team=ndb.Key(Team, team.key_name),
                year=self.year,
                robot_name=teamData['robotName'].strip()
            )

        ret_models.append((team, districtTeam, robot))

    return (ret_models, (current_page < total_pages))
def parse(self, response):
    """Parse team info from FMSAPI.

    Args:
        response: decoded JSON dict from the FMSAPI team listing endpoint.

    Returns:
        A tuple of: list of (Team, DistrictTeam, Robot) model tuples, and a
        Boolean indicating if there are more pages to be fetched.
    """
    # Get team json
    # don't need to null check, if error, HTTP code != 200, so we wont' get here
    current_page = response['pageCurrent']
    total_pages = response['pageTotal']
    teams = response['teams']

    ret_models = []
    for teamData in teams:
        # Use .get() so a missing 'website' key can't raise KeyError; the
        # original else-branch already treated the key as optional.
        raw_website = teamData.get('website')
        # Fix issue where FIRST's API returns dummy website for all teams
        if raw_website is not None and 'www.firstinspires.org' in raw_website:
            website = None
        else:
            website = WebsiteHelper.format_url(raw_website)

        team = Team(
            id="frc{}".format(teamData['teamNumber']),
            team_number=teamData['teamNumber'],
            name=teamData['nameFull'],
            nickname=teamData['nameShort'],
            school_name=teamData.get('schoolName'),
            # homeCMP is optional; normalize to lowercase when present
            home_cmp=teamData.get('homeCMP').lower() if teamData.get('homeCMP') else None,
            city=teamData['city'],
            state_prov=teamData['stateProv'],
            country=teamData['country'],
            website=website,
            rookie_year=teamData['rookieYear'])

        districtTeam = None
        if teamData['districtCode']:
            districtKey = District.renderKeyName(
                self.year, teamData['districtCode'].lower())
            districtTeam = DistrictTeam(
                id=DistrictTeam.renderKeyName(districtKey, team.key_name),
                team=ndb.Key(Team, team.key_name),
                year=self.year,
                district_key=ndb.Key(District, districtKey),
            )

        robot = None
        if teamData['robotName']:
            robot = Robot(
                id=Robot.renderKeyName(team.key_name, self.year),
                team=ndb.Key(Team, team.key_name),
                year=self.year,
                robot_name=teamData['robotName'].strip())

        ret_models.append((team, districtTeam, robot))

    return (ret_models, (current_page < total_pages))
def parse(self, response):
    """Parse team info from FMSAPI.

    Args:
        response: decoded JSON dict from the FMSAPI team listing endpoint.

    Returns:
        A tuple of: list of (Team, DistrictTeam, Robot) model tuples, and a
        Boolean indicating if there are more pages to be fetched.
    """
    # Get team json
    # don't need to null check, if error, HTTP code != 200, so we wont' get here
    current_page = response['pageCurrent']
    total_pages = response['pageTotal']
    teams = response['teams']

    ret_models = []
    for teamData in teams:
        # Use .get() so a missing 'website' key can't raise KeyError; the
        # original else-branch already treated the key as optional.
        raw_website = teamData.get('website')
        # Fix issue where FIRST's API returns dummy website for all teams
        if raw_website is not None and 'www.firstinspires.org' in raw_website:
            website = None
        else:
            # Default missing schemes to http
            website = urlparse.urlparse(
                raw_website, 'http').geturl() if raw_website else None
            # Fix oddity with urlparse having three slashes after the scheme (#1635)
            website = website.replace('///', '//') if website else None

        team = Team(
            id="frc{}".format(teamData['teamNumber']),
            team_number=teamData['teamNumber'],
            name=teamData['nameFull'],
            nickname=teamData['nameShort'],
            city=teamData['city'],
            state_prov=teamData['stateProv'],
            country=teamData['country'],
            website=website,
            rookie_year=teamData['rookieYear'])

        districtTeam = None
        if teamData['districtCode']:
            districtAbbrev = DistrictType.abbrevs[
                teamData['districtCode'].lower()]
            districtTeam = DistrictTeam(
                id=DistrictTeam.renderKeyName(
                    self.year, districtAbbrev, team.key_name),
                team=ndb.Key(Team, team.key_name),
                year=self.year,
                district=districtAbbrev)

        robot = None
        if teamData['robotName']:
            robot = Robot(
                id=Robot.renderKeyName(team.key_name, self.year),
                team=ndb.Key(Team, team.key_name),
                year=self.year,
                robot_name=teamData['robotName'].strip())

        ret_models.append((team, districtTeam, robot))

    return (ret_models, (current_page < total_pages))
def parse(self, response):
    """Parse team info from FMSAPI

    Returns a tuple of: list of models (Team, DistrictTeam, Robot),
    and a Boolean indicating if there are more pages to be fetched
    """
    # Get team json
    # don't need to null check, if error, HTTP code != 200, so we wont' get here
    current_page = response['pageCurrent']
    total_pages = response['pageTotal']

    ret_models = []
    for teamData in response['teams']:
        # concat city/state/country to get address
        address = u"{}, {}, {}".format(
            teamData['city'], teamData['stateProv'], teamData['country'])

        team_model = Team(
            id="frc{}".format(teamData['teamNumber']),
            team_number=teamData['teamNumber'],
            name=teamData['nameFull'],
            nickname=teamData['nameShort'],
            address=address,
            website=teamData['website'],
            rookie_year=teamData['rookieYear'])

        district_team_model = None
        district_code = teamData['districtCode']
        if district_code:
            abbrev = DistrictType.abbrevs[district_code.lower()]
            district_team_model = DistrictTeam(
                id=DistrictTeam.renderKeyName(
                    self.year, abbrev, team_model.key_name),
                team=ndb.Key(Team, team_model.key_name),
                year=self.year,
                district=abbrev)

        robot_model = None
        if teamData['robotName']:
            robot_model = Robot(
                id=Robot.renderKeyName(team_model.key_name, self.year),
                team=ndb.Key(Team, team_model.key_name),
                year=self.year,
                robot_name=teamData['robotName'].strip())

        ret_models.append((team_model, district_team_model, robot_model))

    return (ret_models, (current_page < total_pages))
def parse(self, response):
    """Parse team info from FMSAPI

    Returns a tuple of: list of models (Team, DistrictTeam, Robot),
    and a Boolean indicating if there are more pages to be fetched
    """
    # Get team json
    # don't need to null check, if error, HTTP code != 200, so we wont' get here
    current_page = response['pageCurrent']
    total_pages = response['pageTotal']
    teams = response['teams']

    ret_models = []
    for teamData in teams:
        city = teamData['city']
        state_prov = teamData['stateProv']
        country = teamData['country']
        # concat city/state/country to get address
        address = u"{}, {}, {}".format(city, state_prov, country)

        team_number = teamData['teamNumber']
        team = Team(
            id="frc{}".format(team_number),
            team_number=team_number,
            name=teamData['nameFull'],
            nickname=teamData['nameShort'],
            address=address,
            website=teamData['website'],
            rookie_year=teamData['rookieYear'])

        if teamData['districtCode']:
            districtAbbrev = DistrictType.abbrevs[
                teamData['districtCode'].lower()]
            districtTeam = DistrictTeam(
                id=DistrictTeam.renderKeyName(
                    self.year, districtAbbrev, team.key_name),
                team=ndb.Key(Team, team.key_name),
                year=self.year,
                district=districtAbbrev)
        else:
            districtTeam = None

        robot_name = teamData['robotName']
        if robot_name:
            robot = Robot(
                id=Robot.renderKeyName(team.key_name, self.year),
                team=ndb.Key(Team, team.key_name),
                year=self.year,
                robot_name=robot_name.strip())
        else:
            robot = None

        ret_models.append((team, districtTeam, robot))

    return (ret_models, (current_page < total_pages))
def parse(self, response):
    """Parse team info from FMSAPI

    Returns a tuple of models (Team, DistrictTeam, Robot)
    """
    # Get team json
    # don't need to null check, if error, HTTP code != 200, so we wont' get here
    teamData = response["teams"][0]

    # concat city/state/country to get address
    address = u"{}, {}, {}".format(
        teamData["city"], teamData["stateProv"], teamData["country"])

    # NOTE(review): Team is constructed without an explicit id here (unlike
    # sibling parsers); team.key_name below presumably derives the key from
    # team_number -- confirm against the Team model.
    team = Team(
        team_number=teamData["teamNumber"],
        name=teamData["nameFull"],
        nickname=teamData["nameShort"],
        address=address,
        rookie_year=teamData["rookieYear"],
    )

    districtTeam = None
    district_code = teamData["districtCode"]
    if district_code:
        districtAbbrev = DistrictType.abbrevs[district_code.lower()]
        districtTeam = DistrictTeam(
            id=DistrictTeam.renderKeyName(
                self.year, districtAbbrev, team.key_name),
            team=ndb.Key(Team, team.key_name),
            year=self.year,
            district=districtAbbrev,
        )

    robot = None
    if teamData["robotName"]:
        robot = Robot(
            id=Robot.renderKeyName(team.key_name, self.year),
            team=ndb.Key(Team, team.key_name),
            year=self.year,
            robot_name=teamData["robotName"].strip(),
        )

    return (team, districtTeam, robot)
def get(self, year):
    """Backfill DistrictTeam models for ``year`` from EventTeam attendance.

    For every event in ``year`` that has a district_key, collect the district
    of each participating team, assign each team to the district it appeared
    in most often, and create/update the corresponding DistrictTeam entities.
    """
    year = int(year)
    team_districts = defaultdict(list)
    logging.info("Fetching events in {}".format(year))
    # Bug fix: this previously filtered Event.district_key == None and then
    # dereferenced event.district_key.id() below, which raises AttributeError
    # for every matched event. We need events that HAVE a district_key.
    # The old `Event.event_district_enum != None` filter is dropped because
    # Datastore forbids inequality filters on more than one property in a
    # single query (and `!= None` is an inequality).
    year_events = Event.query(
        year == Event.year,
        Event.district_key != None).fetch()
    for event in year_events:
        logging.info("Fetching EventTeams for {}".format(event.key_name))
        event_teams = EventTeam.query(EventTeam.event == event.key).fetch()
        for event_team in event_teams:
            team_districts[event_team.team.id()].append(event.district_key.id())

    new_district_teams = []
    for team_key, districts in team_districts.iteritems():
        # Assign each team to the district it appeared in most often
        most_frequent_district_key = max(set(districts), key=districts.count)
        logging.info("Assuming team {} belongs to {}".format(team_key, most_frequent_district_key))
        dt_key = DistrictTeam.renderKeyName(most_frequent_district_key, team_key)
        new_district_teams.append(
            DistrictTeam(
                id=dt_key,
                year=year,
                team=ndb.Key(Team, team_key),
                district_key=ndb.Key(District, most_frequent_district_key)))

    logging.info("Finishing updating old district teams from event teams")
    DistrictTeamManipulator.createOrUpdate(new_district_teams)
    self.response.out.write("Finished creating district teams for {}".format(year))