def get_data_from_first(global_config, year, event_code, query_str, round_str = '', table_to_parse=2):
    """Build a JSON-style rankings payload by querying the FIRST FRC API.

    Legacy variant: fetches rankings over HTTPS with a hard-coded Basic-auth
    header, parses the response with RankParser, and accumulates JSON text
    fragments in ``result``.

    NOTE(review): as visible here the function never returns ``result`` and
    ``store_data_to_file`` is set but never used afterwards -- this looks
    like a dead/abandoned code path (a same-named function is defined later
    in this module, and Python keeps only the last definition).  Confirm
    before deleting.
    NOTE(review): the Authorization header embeds a base64 credential in
    source control -- it should be moved to configuration/secrets.
    """
    result = []
    store_data_to_file = False
    # derive our competition name from the FIRST event code
    competition = WebCommonUtils.map_event_code_to_comp(year+event_code)
    result.append('{ "event" : "%s",\n' % (event_code.lower()))
    try:
        opener = urllib2.build_opener()
        # hard-coded Basic-auth credential (see NOTE in the docstring)
        opener.addheaders = [('Authorization', 'Basic a3N0aGlsYWlyZTozMDMyRTUxRS1CNkFBLTQ2QzgtOEY5Qy05QzdGM0EzM0Q4RjI='),('Accept','application/json')]
        #first_url_str = 'http://frc-events.usfirst.org/%s/%s/%s' % (year,event_code.upper(),query_str)
        first_url_str = 'https://frc-api.usfirst.org/api/v1.0/rankings/%s/%s' % (year,event_code.upper())
        print 'GET - %s' % first_url_str
        rank_data = opener.open(first_url_str)
        my_parser = RankParser()
        rankings, _ = my_parser.parse( rank_data, table_to_parse )
        # rankings is a list of rows, with the first row being the column
        # headings; apply the headings to each subsequent row to build the
        # json response
        result.append(' "last_updated": "%s",\n' % time.strftime('%c'))
        headings = rankings[0]
        result.append(' "columns" : [\n')
        for heading in headings:
            result.append(' { "sTitle": "%s" }' % heading)
            result.append(',\n')
        if len(headings)>0:
            # drop the trailing comma emitted after the last column entry
            result = result[:-1]
        result.append(' ],\n')
        result.append(' "%s" : [\n' % (query_str))
        for line in rankings[1:]:
            result.append(' [ ')
            for i in range(0,len(headings)):
                if need_team_hyperlink(headings[i]):
                    #result.append('"%s"' % (line[i]))
                    # team cells become links into the per-team data page
                    result.append(('"<a href=\\"/teamdata/%s/'% competition)+line[i]+'\\">'+line[i]+'</a>"')
                else:
                    #result.append('"%s": "%s"' % (headings[i].title(),line[i]))
                    result.append('"%s"' % (line[i]))
                result.append(', ')
            if len(line) > 0:
                # drop the trailing comma after the last cell of the row
                result = result[:-1]
            result.append(' ],\n')
        if len(rankings) > 1:
            # drop the trailing comma after the last data row
            result = result[:-1]
        result.append(' ]\n')
        store_data_to_file = True
    except Exception, err:
        print 'Caught exception:', err
def get_data_from_first(global_config, year, event_code, query_str, table_to_parse=2):
    """Scrape an event results table from the legacy FIRST website and
    return it as a JSON string.

    Fetches http://www2.usfirst.org/<year>comp/events/<EVENT>/<query>.html,
    parses table number ``table_to_parse`` with RankParser, and renders the
    rows as a DataTables-style document ("columns" plus one array per row).
    Team-number cells are emitted as hyperlinks into /teamdata/<competition>/.

    On any fetch/parse failure the partial payload is closed off and
    returned anyway (best-effort behavior, preserved from the original).
    Also sets the web.py response Content-Type header as a side effect.
    """
    web.header('Content-Type', 'application/json')
    result = []
    result.append('{ "event" : "%s",\n' % (event_code.lower()))
    try:
        first_url_str = 'http://www2.usfirst.org/%scomp/events/%s/%s.html' % (year,event_code.upper(),query_str)
        rank_data = urllib2.urlopen(first_url_str).read()
        my_parser = RankParser()
        rankings, _ = my_parser.parse( rank_data, table_to_parse )
        # derive our competition name from the FIRST event code
        competition = WebCommonUtils.map_event_code_to_comp(year+event_code)
        # rankings is a list of rows; the first row holds the column headings,
        # which are applied to each subsequent row to build the json response
        headings = rankings[0]
        result.append(' "columns" : [\n')
        for heading in headings:
            result.append(' { "sTitle": "%s" }' % heading)
            result.append(',\n')
        if len(headings)>0:
            result = result[:-1]  # drop trailing comma after last column
        result.append(' ],\n')
        result.append(' "%s" : [\n' % (query_str))
        for line in rankings[1:]:
            result.append(' [ ')
            for i in range(0,len(headings)):
                if need_team_hyperlink(headings[i]):
                    result.append(('"<a href=\\"/teamdata/%s/'% competition)+line[i]+'\\">'+line[i]+'</a>"')
                else:
                    result.append('"%s"' % (line[i]))
                result.append(', ')
            if len(line) > 0:
                result = result[:-1]  # drop trailing comma after last cell
            result.append(' ],\n')
        if len(rankings) > 1:
            result = result[:-1]  # drop trailing comma after last row
        result.append(' ]\n')
    except Exception:
        # BUGFIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt.  Keep the best-effort behavior for ordinary
        # errors, but leave a trace in the debug log instead of silence.
        global_config['logger'].debug( 'get_data_from_first: failed to retrieve %s data for %s%s', query_str, year, event_code )
    result.append(' ] }\n')
    return ''.join(result)
def get_event_rank_list_json(global_config, year, event_code):
    """Return a JSON rank list for an event, sourced from The Blue Alliance.

    Builds ``{"event": ..., "last_updated": ..., "rankings": [...]}`` where
    each rankings entry carries rank, team_number and a fixed "available"
    status.  On success the payload is also cached via FileSync; when TBA
    returns nothing, the previously cached file is served, falling back to
    an empty-but-well-formed payload.
    """
    global_config['logger'].debug( 'GET Event Rank List Json' )
    # derive our competition name from the FIRST event code
    competition = WebCommonUtils.map_event_code_to_comp(year+event_code)
    store_data_to_file = False
    result = []
    rankings_data = get_event_data_from_tba( '%s%s/rankings' % (year,event_code.lower()) )
    result.append('{ "event" : "%s",\n' % (event_code.lower()))
    rankings = rankings_data.get('rankings', [])
    if len(rankings):
        # each TBA entry is a dict; we only surface rank and team number,
        # stripping the 'frc' prefix from the team key
        result.append(' "last_updated": "%s",\n' % time.strftime('%c'))
        result.append(' "rankings" : [\n')
        for team_rank in rankings:
            result.append(' { "rank": %d, "team_number": %s, "status": "available" }' % (team_rank['rank'],team_rank['team_key'].replace('frc','')))
            result.append(',\n')
        # BUGFIX: was `len(rankings) > 1`, which left a trailing comma
        # (invalid JSON) when exactly one ranking entry was returned
        if len(rankings) > 0:
            result = result[:-1]
        result.append(' ]\n')
        store_data_to_file = True
    else:
        # we were not able to retrieve the data from TBA, so let's return any
        # stored file with the information, otherwise we will return an empty
        # json payload
        stored_file_data = FileSync.get( global_config, '%s/EventData/ranklist.json' % (competition) )
        if stored_file_data != '':
            return stored_file_data
        else:
            # no stored data either, so return a formatted but empty payload
            result.append(' "last_updated": "%s",\n' % time.strftime('%c'))
            result.append(' "rankings" : []\n')
    result.append(' }\n')
    json_str = ''.join(result)
    if store_data_to_file:
        try:
            FileSync.put( global_config, '%s/EventData/ranklist.json' % (competition), 'text', json_str)
        except:
            raise
    return json_str
def get_team_list_json(global_config, season, event, store_json_file=False):
    """Return the JSON team list for a competition.

    The list is currently fetched from The Blue Alliance via
    get_team_list_json_from_tba(); the local-database path is retained
    below as a commented-out block.  Optionally caches the JSON to
    <comp>/EventData/teams.json via FileSync.

    NOTE(review): a DB session is opened and removed but no longer used on
    the active code path -- presumably kept for the disabled branch.
    """
    global team_info_dict
    global_config['logger'].debug( 'GET Team List For Competition %s', event )
    comp = WebCommonUtils.map_event_code_to_comp(event, season)
    session = DbSession.open_db_session(global_config['db_name'] + season)
    result = []
    result.append('{ "teams" : [\n')
    '''
    team_list = DataModel.getTeamsInNumericOrder(session, comp)
    for team in team_list:
        team_info = None
        # TODO - Remove this hardcoded number for the valid team number. This check prevents
        # requesting information for invalid team numbers, which has been known to happen when
        # tablet operators enter bogus team numbers by mistake
        if team.team < 10000:
            team_info = DataModel.getTeamInfo(session, int(team.team))
        if team_info:
            result.append(' { "team_number": "%s", "nickname": "%s" }' % (team.team,team_info.nickname))
            result.append(',\n')
        else:
            result.append(' { "team_number": "%s", "nickname": "%s" }' % (team.team,'Unknown'))
            result.append(',\n')
    if len(team_list) > 0:
        result = result[:-1]
    result.append(' ] }\n')
    json_str = ''.join(result)
    else:
    '''
    json_str = get_team_list_json_from_tba(global_config, comp)
    if store_json_file is True:
        try:
            FileSync.put( global_config, '%s/EventData/%s.json' % (comp,'teams'), 'text', json_str)
        except:
            raise
    session.remove()
    return json_str
def get_team_event_list_from_tba(global_config, team, season):
    """Return the list of competition names a team attended in ``season``.

    Queries TBA's /team/frc<team>/<season>/events endpoint and maps each
    event code through WebCommonUtils.map_event_code_to_comp().  Returns an
    empty (or partial) list if the TBA request or response parsing fails.
    """
    global_config['logger'].debug( 'GET Team Event List TBA' )
    result = []
    url_str = '/api/v2/team/frc%s/%s/events' % (team,season)
    try:
        event_dict = TbaIntf.get_from_tba_parsed(url_str)
        for event in event_dict:
            comp = WebCommonUtils.map_event_code_to_comp(event['event_code'], season)
            result.append(comp)
    except Exception:
        # BUGFIX: narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt are no longer swallowed; TBA errors still
        # degrade to an empty list, but now leave a debug-log trace.
        global_config['logger'].debug( 'Unable to retrieve TBA event list for team %s', team )
    return result
def get_event_stats_json(global_config, year, event_code, stat_type): global_config['logger'].debug( 'GET Event Results Json' ) # derive our competition name from the FIRST event code competition = WebCommonUtils.map_event_code_to_comp(year+event_code) store_data_to_file = False result = [] result.append('{ "event" : "%s",\n' % (event_code.lower())) event_stats = get_event_data_from_tba( '%s%s/oprs' % (year,event_code.lower()) ) if len(event_stats): # rankings is now a list of lists, with the first element of the list being the list of column headings # take the list of columngs and apply to each of the subsequent rows to build the json response result.append(' "last_updated": "%s",\n' % time.strftime('%c')) headings = [ 'Team', stat_type.rstrip('s').upper() ] result.append(' "columns" : [\n') for heading in headings: result.append(' { "sTitle": "%s" }' % heading) result.append(',\n') if len(headings)>0: result = result[:-1] result.append(' ],\n') result.append(' "stats" : [\n') try: stats_dict = event_stats[stat_type] for key, value in stats_dict.iteritems(): result.append( ' ["%s", %.2f' % (get_team_hyperlink( competition, key.lstrip('frc') ),value) ) result.append(' ],\n') store_data_to_file = True if store_data_to_file is True: result = result[:-1] result.append(' ]\n') except: global_config['logger'].debug( 'No Statistics Data For %s' % stat_type ) result.append(' ]\n') else: # we were not able to retrieve the data from FIRST, so let's return any stored file with the # information, otherwise we will return an empty json payload stored_file_data = FileSync.get( global_config, '%s/EventData/eventstats_%s.json' % (competition,stat_type) ) if stored_file_data != '': return stored_file_data result.append(' }\n') json_str = ''.join(result) if store_data_to_file: try: FileSync.put( global_config, '%s/EventData/eventstats_%s.json' % (competition,stat_type), 'text', json_str) except: raise return json_str
def get_event_matchschedule_json(global_config, year, event_code, team_str = None):
    """Return the match schedule for an event (optionally one team) as JSON.

    Fetches the full match list from The Blue Alliance (event-wide, or via
    WebTeamData for a single team when ``team_str`` is given), buckets the
    matches into qualification / quarter_finals / semi_finals / finals, and
    serializes the dict with json.dumps().  On success the payload is cached
    via FileSync; when TBA returns nothing, the cached file is served.
    """
    global_config['logger'].debug( 'GET Event Schedule Json' )
    # derive our competition name from the FIRST event code
    competition = WebCommonUtils.map_event_code_to_comp(year+event_code)
    store_data_to_file = False
    result = []
    if team_str is None:
        exp_filename = ''
        event_matches = get_event_data_from_tba( '%s%s/matches' % (year,event_code.lower()) )
    else:
        # per-team schedule: cache under a team-suffixed filename
        exp_filename = '_%s' % team_str
        event_matches = WebTeamData.get_team_data_from_tba( team_str, 'event/%s%s/matches' % (year,event_code.lower()) )
    match_schedule = dict()
    match_schedule['event'] = event_code.lower()
    match_schedule['columns'] = [ 'Round', 'Match', 'Red_1', 'Red_2', 'Red_3', 'Blue_1', 'Blue_2', 'Blue_3' ]
    match_schedule['qualification'] = []
    match_schedule['quarter_finals'] = []
    match_schedule['semi_finals'] = []
    match_schedule['finals'] = []
    if len(event_matches):
        # the entire match set is returned from TBA; bucket each match by
        # its competition level
        for match in event_matches:
            comp_level = match['comp_level']
            if comp_level in ('qf', 'sf'):
                # elimination matches are identified as <set>-<match>
                match_str = '%s-%s' % (match['set_number'],match['match_number'])
            else:
                match_str = str(match['match_number'])
            # TBA team keys look like 'frc1234'; strip the prefix for display
            match_entry = [ comp_level, match_str,
                            match['alliances']['red']['team_keys'][0].lstrip('frc'),
                            match['alliances']['red']['team_keys'][1].lstrip('frc'),
                            match['alliances']['red']['team_keys'][2].lstrip('frc'),
                            match['alliances']['blue']['team_keys'][0].lstrip('frc'),
                            match['alliances']['blue']['team_keys'][1].lstrip('frc'),
                            match['alliances']['blue']['team_keys'][2].lstrip('frc') ]
            if comp_level == 'qm':
                match_schedule['qualification'].append(match_entry)
            elif comp_level == 'qf':
                match_schedule['quarter_finals'].append(match_entry)
            elif comp_level == 'sf':
                match_schedule['semi_finals'].append(match_entry)
            elif comp_level == 'f':
                match_schedule['finals'].append(match_entry)
            store_data_to_file = True
        # the qualification match schedule needs to be sorted; the sort key is
        # the second element of each row, which is the match number
        match_schedule['qualification'].sort(key=lambda match_list: int(match_list[1]))
    else:
        # we were not able to retrieve the data from FIRST, so let's return
        # any stored file with the information, otherwise we will return an
        # empty json payload
        stored_file_data = FileSync.get( global_config, '%s/EventData/matchschedule%s.json' % (competition,exp_filename) )
        if stored_file_data != '':
            return stored_file_data
    json_str = json.dumps(match_schedule)
    if store_data_to_file:
        try:
            FileSync.put( global_config, '%s/EventData/matchschedule%s.json' % (competition,exp_filename), 'text', json_str)
        except:
            raise
    return json_str
def get_event_matchresults_json(global_config, year, event_code, round_str, team_str = None):
    """Return match results for one round of an event as a JSON string.

    ``round_str`` selects the competition level ('qual', 'quarters',
    'semis' or 'finals'); matches are fetched from The Blue Alliance
    (event-wide, or via WebTeamData for one team) and rendered as a
    DataTables payload.  On success the payload is cached via FileSync;
    when TBA returns nothing, the cached file is served.

    NOTE(review): an unrecognized round_str leaves match_selector unbound
    and would raise NameError in the match loop -- presumably callers only
    pass the four known values; confirm.
    """
    global_config['logger'].debug( 'GET Event Results Json' )
    if round_str == 'qual':
        match_selector = 'qm'
    elif round_str == 'quarters':
        match_selector = 'qf'
    elif round_str == 'semis':
        match_selector = 'sf'
    elif round_str == 'finals':
        match_selector = 'f'
    # derive our competition name from the FIRST event code
    competition = WebCommonUtils.map_event_code_to_comp(year+event_code)
    store_data_to_file = False
    result = []
    result.append('{ "event" : "%s",\n' % (event_code.lower()))
    if team_str is None:
        exp_filename = ''
        event_matches = get_event_data_from_tba( '%s%s/matches' % (year,event_code.lower()), api_version='v3' )
    else:
        # per-team results: cache under a team-suffixed filename
        exp_filename = '_%s' % team_str
        event_matches = WebTeamData.get_team_data_from_tba( team_str, 'event/%s%s/matches' % (year,event_code.lower()) )
    if len(event_matches):
        result.append(' "last_updated": "%s",\n' % time.strftime('%c'))
        headings = [ 'Match', 'Start Time', 'Red 1', 'Red 2', 'Red 3', 'Blue 1', 'Blue 2', 'Blue 3', 'Red Score', 'Blue Score' ]
        result.append(' "columns" : [\n')
        for heading in headings:
            result.append(' { "sTitle": "%s" }' % heading)
            result.append(',\n')
        if len(headings)>0:
            # drop the trailing comma after the last column entry
            result = result[:-1]
        result.append(' ],\n')
        result.append(' "matchresults" : [\n')
        # the entire match set is returned from TBA, filter out the matches
        # for the desired round
        for match in event_matches:
            if str(match['comp_level']) == match_selector:
                result.append(' [ ')
                # Match number.  The try/raise/except here is a control-flow
                # trick: when 'json_no_links' is missing or not 'Yes', the
                # raise jumps to the except branch, which emits a hyperlink.
                try:
                    if global_config['json_no_links'] == 'Yes':
                        result.append( '"%s", ' % str(match['match_number']) )
                    else:
                        raise
                except:
                    result.append( '"%s", ' % get_match_hyperlink(competition, match) )
                # Match start time (epoch seconds -> local-time display string)
                match_epoch_time = int(match['time'])
                time_format_str = '%a %b %d - %I:%M %p'
                match_time_str = datetime.datetime.fromtimestamp(match_epoch_time).strftime(time_format_str)
                result.append( '"%s", ' % match_time_str )
                # Same raise-to-jump trick: plain team numbers when links are
                # disabled, hyperlinks otherwise
                try:
                    if global_config['json_no_links'] == 'Yes':
                        # Red alliance teams
                        result.append( '"%s", ' % str(match['alliances']['red']['team_keys'][0]).lstrip('frc') )
                        result.append( '"%s", ' % str(match['alliances']['red']['team_keys'][1]).lstrip('frc') )
                        result.append( '"%s", ' % str(match['alliances']['red']['team_keys'][2]).lstrip('frc') )
                        # Blue alliance teams
                        result.append( '"%s", ' % str(match['alliances']['blue']['team_keys'][0]).lstrip('frc') )
                        result.append( '"%s", ' % str(match['alliances']['blue']['team_keys'][1]).lstrip('frc') )
                        result.append( '"%s", ' % str(match['alliances']['blue']['team_keys'][2]).lstrip('frc') )
                    else:
                        raise
                except:
                    # Red alliance teams
                    result.append( '"%s", ' % get_team_hyperlink( competition, str(match['alliances']['red']['team_keys'][0]).lstrip('frc') ) )
                    result.append( '"%s", ' % get_team_hyperlink( competition, str(match['alliances']['red']['team_keys'][1]).lstrip('frc') ) )
                    result.append( '"%s", ' % get_team_hyperlink( competition, str(match['alliances']['red']['team_keys'][2]).lstrip('frc') ) )
                    # Blue alliance teams
                    result.append( '"%s", ' % get_team_hyperlink( competition, str(match['alliances']['blue']['team_keys'][0]).lstrip('frc') ) )
                    result.append( '"%s", ' % get_team_hyperlink( competition, str(match['alliances']['blue']['team_keys'][1]).lstrip('frc') ) )
                    result.append( '"%s", ' % get_team_hyperlink( competition, str(match['alliances']['blue']['team_keys'][2]).lstrip('frc') ) )
                # Red alliance score ('-1' means not yet played)
                score = str(match['alliances']['red']['score'])
                if score == '-1':
                    score = '-'
                result.append( '"%s", ' % score )
                # Blue alliance score ('-1' means not yet played)
                score = str(match['alliances']['blue']['score'])
                if score == '-1':
                    score = '-'
                result.append( '"%s" ' % score )
                result.append(' ],\n')
                store_data_to_file = True
        if store_data_to_file is True:
            # drop the trailing ',\n' after the last emitted row
            result = result[:-1]
            result.append(' ]\n')
        result.append(' ]\n')
    else:
        # we were not able to retrieve the data from FIRST, so let's return
        # any stored file with the information, otherwise we will return an
        # empty json payload
        stored_file_data = FileSync.get( global_config, '%s/EventData/matchresults_%s%s.json' % (competition,round_str,exp_filename) )
        if stored_file_data != '':
            return stored_file_data
    result.append(' }\n')
    json_str = ''.join(result)
    if store_data_to_file:
        try:
            FileSync.put( global_config, '%s/EventData/matchresults_%s%s.json' % (competition,round_str,exp_filename), 'text', json_str)
        except:
            raise
    return json_str
def get_event_standings_json(global_config, year, event_code):
    """Return event standings (rankings) as a JSON string.

    Fetches the rankings dict from The Blue Alliance and renders Rank, Team
    (as a /teamdata hyperlink), win-loss-tie Record, Matches_Played and Dq
    as a DataTables payload.  On success the payload is cached via FileSync;
    when TBA returns nothing, the cached file (or an empty payload) is
    returned instead.

    NOTE(review): another function with this same name appears later in the
    module; at import time the later definition wins, so confirm which
    variant callers actually get before modifying either.
    """
    global_config['logger'].debug( 'GET Event Rankings Json' )
    # derive our competition name from the FIRST event code
    competition = WebCommonUtils.map_event_code_to_comp(year+event_code)
    store_data_to_file = False
    result = []
    tba_data = get_event_data_from_tba( '%s%s/rankings' % (year,event_code.lower()) )
    result.append('{ "event" : "%s",\n' % (event_code.lower()))
    if tba_data:
        rankings = tba_data.get('rankings')
        if rankings is not None:
            result.append(' "last_updated": "%s",\n' % time.strftime('%c'))
            # column titles double as lookup keys (lowercased) into each
            # TBA ranking entry
            headings = [ 'Rank', 'Team', 'Record', 'Matches_Played', 'Dq' ]
            result.append(' "columns" : [\n')
            for heading in headings:
                result.append(' { "sTitle": "%s" }' % heading)
                result.append(',\n')
            if len(headings)>0:
                # drop the trailing comma after the last column entry
                result = result[:-1]
            result.append(' ],\n')
            result.append(' "rankings" : [\n')
            for line in rankings:
                result.append(' [ ')
                for item in headings:
                    key = item.lower()
                    if key == 'record':
                        # record is a nested dict of wins/losses/ties
                        result.append('"%s-%s-%s"' % (str(line[key]['wins']),str(line[key]['losses']),str(line[key]['ties'])))
                    elif key == 'team':
                        # team cell links into the per-team data page
                        team_str = line['team_key'].replace('frc','')
                        result.append(('"<a href=\\"/teamdata/%s/'% competition)+team_str+'\\">'+team_str+'</a>"')
                    else:
                        result.append('"%s"' % (str(line[key])))
                    result.append(', ')
                if len(line) > 0:
                    # drop the trailing comma after the last cell
                    result = result[:-1]
                result.append(' ],\n')
            if len(rankings) > 1:
                # drop the trailing comma after the last row
                result = result[:-1]
            result.append(' ]\n')
            store_data_to_file = True
            result.append(' ]\n')
    else:
        # we were not able to retrieve the data from FIRST, so let's return
        # any stored file with the information, otherwise we will return an
        # empty json payload
        stored_file_data = FileSync.get( global_config, '%s/EventData/rankings.json' % (competition) )
        if stored_file_data != '':
            return stored_file_data
        else:
            # no stored data either, so return a formatted but empty payload
            result.append(' "last_updated": "%s",\n' % time.strftime('%c'))
            result.append(' "columns" : [],\n')
            result.append(' "rankings" : []\n')
    result.append(' }\n')
    json_str = ''.join(result)
    if store_data_to_file:
        try:
            FileSync.put( global_config, '%s/EventData/rankings.json' % (competition), 'text', json_str)
        except:
            raise
    return json_str
def get_team_rankings_json(global_config, season, event, attr_filters=None, filter_name=None, thumbnails = False, store_json_file=False):
    """Return team scouting rankings for a competition as a JSON string.

    With no ``attr_filters``, teams come from DataModel.getTeamsInRankOrder
    and each team's stored score is emitted.  With filters (each either
    'attr' or 'attr=value'), a weighted score is accumulated per team from
    the attribute database and teams are emitted in sorted-score order.
    Teams with a zero score are omitted.  Optionally attaches scouting
    thumbnails and caches the payload via FileSync.

    NOTE(review): the filtered path sorts ascending by score -- confirm
    whether descending (best first) was intended.
    """
    # BUGFIX: default was a mutable list literal (shared across calls);
    # use None as the sentinel instead -- behavior for existing callers
    # is unchanged.
    if attr_filters is None:
        attr_filters = []
    global_config['logger'].debug( 'GET Team Rankings Json' )
    store_data_to_file = False
    comp = WebCommonUtils.map_event_code_to_comp(event, season)
    session = DbSession.open_db_session(global_config['db_name'] + season)
    result = []
    result.append('{ "rankings": [\n')
    rank_added = False
    if len(attr_filters) == 0:
        team_rankings = DataModel.getTeamsInRankOrder(session, comp, False)
        for team in team_rankings:
            # round the score to an integer value
            team.score = float(int(team.score))
            if team.score > 0:
                thumbnails_snippet = ''
                if thumbnails:
                    thumbnails_snippet = ',\n' + get_team_scouting_thumbnails_json_snippet(global_config, comp, str(team.team))
                result.append( ' { "score": %0.1f, "competition": "%s", "team": %d%s }' % (team.score, comp, team.team,thumbnails_snippet))
                result.append(',\n')
                rank_added = True
    else:
        # we'll need the attribute definitions in order to retrieve the
        # correct attribute value and attribute weighting
        attrdef_filename = WebCommonUtils.get_attrdef_filename(comp=comp)
        attr_definitions = AttributeDefinitions.AttrDefinitions(global_config)
        attr_definitions.parse(attrdef_filename)
        team_rank_dict = dict()
        for attr_filter in attr_filters:
            # filters are either 'name=value' or a bare attribute name
            try:
                attr_name, attr_value = attr_filter.split('=')
            except:
                attr_name = attr_filter
                attr_value = None
            attr_def = attr_definitions.get_definition(attr_name)
            if attr_value is None:
                # no value filter: accumulate the weighted attribute score
                team_rankings = DataModel.getTeamAttributesInRankOrder(session, comp, attr_name, False)
                for team in team_rankings:
                    try:
                        stat_type = attr_def['Statistic_Type']
                    except:
                        stat_type = 'Total'
                    weight = int(float(attr_def['Weight']))
                    if stat_type == 'Average':
                        score = int(team.cumulative_value/team.num_occurs*weight)
                    else:
                        score = int(team.cumulative_value*weight)
                    try:
                        team_rank_dict[team.team] += score
                    except:
                        team_rank_dict[team.team] = score
            else:
                # value filter: score is how many times the value was scouted
                team_rankings = DataModel.getTeamAttributesWithValue(session, comp, attr_name, attr_value, False)
                for team in team_rankings:
                    score = team.all_values.count(attr_value)
                    try:
                        team_rank_dict[team.team] += score
                    except:
                        team_rank_dict[team.team] = score
        sorted_team_rank = sorted(team_rank_dict.items(), key=operator.itemgetter(1))
        for team, score in sorted_team_rank:
            # round the score to an integer value
            score = float(int(score))
            if score > 0:
                thumbnails_snippet = ''
                if thumbnails:
                    thumbnails_snippet = ',\n' + get_team_scouting_thumbnails_json_snippet(global_config, comp, str(team))
                result.append( ' { "score": %0.1f, "competition": "%s", "team": %d%s }' % (score, comp, team, thumbnails_snippet))
                result.append(',\n')
                rank_added = True
    if rank_added == True:
        # drop the trailing comma after the last emitted entry
        result = result[:-1]
    result.append(']}')
    json_str = ''.join(result)
    if store_json_file is True:
        try:
            if filter_name is None:
                file_name = 'scoutingrankings'
            else:
                file_name = 'scoutingrankings_%s' % filter_name
            FileSync.put( global_config, '%s/EventData/%s.json' % (comp,file_name), 'text', json_str)
        except:
            raise
    session.remove()
    return json_str
def GET(self, event_code):
    """web.py handler: recompute scouting scores for an event, then redirect.

    Requires access level 4; maps the event code to a competition name,
    triggers DataModel.recalculate_scoring, and redirects the browser back
    to the event page via a web.seeother exception.
    """
    WebLogin.check_access(global_config,4)
    comp = WebCommonUtils.map_event_code_to_comp(event_code)
    DataModel.recalculate_scoring(global_config, comp)
    # web.py signals HTTP redirects by raising
    raise web.seeother('/event/%s' % event_code)
def get_event_standings_json(global_config, year, event_code):
    """Return event standings as a JSON string (list-of-lists TBA variant).

    Expects the TBA rankings payload as a table: first row is the column
    headings, subsequent rows are data; team columns become /teamdata
    hyperlinks.  On success the payload is cached via FileSync; otherwise
    the cached file (or an empty payload) is returned.

    NOTE(review): this module defines get_event_standings_json twice; this
    later definition shadows the earlier dict-based variant at import time.
    Confirm which one is intended before removing either.
    """
    global_config['logger'].debug( 'GET Event Rankings Json' )
    # derive our competition name from the FIRST event code
    competition = WebCommonUtils.map_event_code_to_comp(year+event_code)
    #return get_data_from_first(global_config, year, event_code, 'rankings')
    store_data_to_file = False
    result = []
    rankings = get_event_data_from_tba( '%s%s/rankings' % (year,event_code.lower()) )
    result.append('{ "event" : "%s",\n' % (event_code.lower()))
    if rankings:
        # rankings is a list of rows, with the first row being the column
        # headings; apply the headings to each subsequent row to build the
        # json response
        result.append(' "last_updated": "%s",\n' % time.strftime('%c'))
        headings = rankings[0]
        result.append(' "columns" : [\n')
        for heading in headings:
            result.append(' { "sTitle": "%s" }' % heading)
            result.append(',\n')
        if len(headings)>0:
            # drop the trailing comma after the last column entry
            result = result[:-1]
        result.append(' ],\n')
        result.append(' "rankings" : [\n')
        for line in rankings[1:]:
            result.append(' [ ')
            for i in range(0,len(headings)):
                if need_team_hyperlink(headings[i]):
                    #result.append('"%s"' % (line[i]))
                    # team cells link into the per-team data page
                    result.append(('"<a href=\\"/teamdata/%s/'% competition)+str(line[i])+'\\">'+str(line[i])+'</a>"')
                else:
                    #result.append('"%s": "%s"' % (headings[i].title(),line[i]))
                    result.append('"%s"' % (str(line[i])))
                result.append(', ')
            if len(line) > 0:
                # drop the trailing comma after the last cell
                result = result[:-1]
            result.append(' ],\n')
        if len(rankings) > 1:
            # drop the trailing comma after the last data row
            result = result[:-1]
        result.append(' ]\n')
        store_data_to_file = True
        result.append(' ]\n')
    else:
        # we were not able to retrieve the data from FIRST, so let's return
        # any stored file with the information, otherwise we will return an
        # empty json payload
        stored_file_data = FileSync.get( global_config, '%s/EventData/rankings.json' % (competition) )
        if stored_file_data != '':
            return stored_file_data
        else:
            # no stored data either, so return a formatted but empty payload
            result.append(' "last_updated": "%s",\n' % time.strftime('%c'))
            result.append(' "columns" : [],\n')
            result.append(' "rankings" : []\n')
    result.append(' }\n')
    json_str = ''.join(result)
    if store_data_to_file:
        try:
            FileSync.put( global_config, '%s/EventData/rankings.json' % (competition), 'text', json_str)
        except:
            raise
    return json_str