def get_team_info_json(global_config, comp, name, store_json_file=False):
    '''Return basic team information (nickname, affiliation, location,
    rookie season, website) as a JSON string, or '' for an unknown team.

    global_config   -- application config dict; must provide 'logger' and 'db_name'
    comp            -- competition string used to derive the season/database
    name            -- team number as a string
    store_json_file -- when True, also cache the payload via FileSync
    '''
    global_config['logger'].debug( 'GET Team %s Info', name )
    season = WebCommonUtils.map_comp_to_season(comp)
    session = DbSession.open_db_session(global_config['db_name'] + season)
    team_info = DataModel.getTeamInfo(session, int(name))
    if team_info is None:
        json_str = ''
    else:
        # emit the name/value pairs in a fixed display order
        fields = ( ('nickname', team_info.nickname),
                   ('affiliation', team_info.fullname),
                   ('location', team_info.location),
                   ('rookie_season', team_info.rookie_season),
                   ('website', team_info.website) )
        pairs = [ ' { "name": "%s", "value": "%s" }' % field for field in fields ]
        json_str = '{ "team": "%s", "team_data" : [\n' % name + ',\n'.join(pairs) + '\n ] }\n'
    if store_json_file is True:
        try:
            FileSync.put( global_config, '%s/EventData/TeamData/team%s_teaminfo.json' % (comp,name), 'text', json_str)
        except:
            raise
    # BUGFIX: release the scoped DB session; every other helper in this file
    # calls session.remove(), this one leaked the session
    session.remove()
    return json_str
def get_team_scouting_notes_json(global_config, comp, name, store_json_file=False):
    '''Build the scouting-notes JSON payload for one team at one competition.

    Optionally persists the payload to the EventData file cache when
    store_json_file is True; always returns the JSON string.
    '''
    global_config['logger'].debug( 'GET Team %s Scouting Notes For Competition %s', name, comp )
    season = WebCommonUtils.map_comp_to_season(comp)
    session = DbSession.open_db_session(global_config['db_name'] + season)

    pieces = [ '{ "competition" : "%s", "team" : "%s",\n' % (comp,name),
               ' "scouting_notes" : [\n' ]

    team_notes = DataModel.getTeamNotes(session, name, comp)
    for entry in team_notes:
        pieces.append(' { "tag": "%s", "note": "%s" }' % (entry.tag,entry.data))
        pieces.append(',\n')
    # drop the trailing comma separator so the JSON array stays well formed
    if len(team_notes) > 0:
        pieces = pieces[:-1]
    pieces.append(' ] }\n')
    json_str = ''.join(pieces)

    if store_json_file is True:
        try:
            FileSync.put( global_config, '%s/EventData/TeamData/team%s_scouting_notes.json' % (comp,name), 'text', json_str)
        except:
            raise
    session.remove()
    return json_str
def get_team_participation_json():
    '''Return a JSON object mapping "frc<team>" keys to the first/last
    seasons each known team competed, and persist it (plus a .js wrapper)
    to the GlobalData file cache.
    '''
    team_participation = {}
    filename = "team_participation"
    my_config = ScoutingAppMainWebServer.global_config
    session = DbSession.open_db_session(my_config["db_name"] + my_config["this_season"])
    teams = session.query(DataModel.TeamInfo).all()
    for team in teams:
        team_key = "frc%d" % team.team
        # teams that never competed have no participation record to report
        if team.first_competed is not None:
            info = {}
            info["first_competed"] = team.first_competed
            info["last_competed"] = team.last_competed
            team_participation[team_key] = info
    # BUGFIX: release the scoped DB session, matching the other helpers in this file
    session.remove()

    team_participation_json = json.dumps(team_participation)
    team_participation_js = "var %s = '%s';" % (filename, team_participation_json)

    # store the team participation information to a file, too
    try:
        FileSync.put(my_config, "GlobalData/%s.json" % (filename), "text", team_participation_json)
        FileSync.put(my_config, "GlobalData/%s.js" % (filename), "text", team_participation_js)
    except:
        raise
    return team_participation_json
def get_geo_location_json(include_teams=True, include_events=True):
    '''Return a JSON object of geo coordinates keyed by event key and/or
    "frc<team>" key, and persist it (plus a .js wrapper) to GlobalData.

    include_events/include_teams select which record types are included;
    the output filename encodes the selection (e.g. "..._Events_Teams").
    '''
    geo_locations = {}
    filename = "geo_coordinates_for"
    my_config = ScoutingAppMainWebServer.global_config
    session = DbSession.open_db_session(my_config["db_name"] + my_config["this_season"])
    if include_events:
        filename += "_Events"
        events = session.query(DataModel.EventInfo).all()
        for event in events:
            if event.geo_location is not None:
                geo_locations[event.event_key] = json.loads(event.geo_location)
    if include_teams:
        filename += "_Teams"
        teams = session.query(DataModel.TeamInfo).all()
        for team in teams:
            if team.geo_location is not None:
                team_key = "frc%d" % team.team
                geo_locations[team_key] = json.loads(team.geo_location)
    # BUGFIX: release the scoped DB session, matching the other helpers in this file
    session.remove()

    geo_location_json = json.dumps(geo_locations)
    geo_location_js = "var %s = '%s';" % (filename, geo_location_json)

    # store the geo location information to a file, too
    try:
        FileSync.put(my_config, "GlobalData/%s.json" % (filename), "text", geo_location_json)
        FileSync.put(my_config, "GlobalData/%s.js" % (filename), "text", geo_location_js)
    except:
        raise
    return geo_location_json
def get_team_score_json(global_config, name, comp, store_json_file=False):
    '''Return the stored score(s) for one team at a competition as JSON.

    A single score is emitted as a scalar "score" member; multiple scores
    are emitted as a JSON array of score objects.
    '''
    global_config['logger'].debug( 'GET Team %s Score For Competition %s', name, comp )
    season = WebCommonUtils.map_comp_to_season(comp)
    session = DbSession.open_db_session(global_config['db_name'] + season)

    fragments = [ '{ "competition" : "%s", "team" : "%s", ' % (comp,name) ]
    team_scores = DataModel.getTeamScore(session, name, comp)
    if len(team_scores)==1:
        fragments.append('"score": "%s" }' % team_scores[0].score)
    else:
        fragments.append(' "score": [')
        for entry in team_scores:
            fragments.append(entry.json())
            fragments.append(',\n')
        # drop the trailing comma separator before closing the array
        if len(team_scores) > 0:
            fragments = fragments[:-1]
        fragments.append(']}')
    json_str = ''.join(fragments)

    if store_json_file is True:
        try:
            FileSync.put( global_config, '%s/EventData/TeamData/team%s_scouting_score.json' % (comp,name), 'text', json_str)
        except:
            raise
    session.remove()
    return json_str
def get_team_score_breakdown_json(global_config, name, comp=None, store_json_file=False):
    '''Return a JSON payload breaking a team's score down by weighted
    scouting attribute (raw and weighted score per attribute).

    comp defaults to the currently configured competition.  Attributes
    with a zero weight are omitted from the breakdown.
    '''
    global_config['logger'].debug( 'GET Team Score Breakdown: %s', name )
    if comp is None:
        comp = global_config['this_competition'] + global_config['this_season']
        season = global_config['this_season']
    else:
        season = WebCommonUtils.map_comp_to_season(comp)
    session = DbSession.open_db_session(global_config['db_name'] + season)
    attrdef_filename = WebCommonUtils.get_attrdef_filename(comp=comp)
    attr_definitions = AttributeDefinitions.AttrDefinitions(global_config)
    attr_definitions.parse(attrdef_filename)
    result = []
    result.append('{ "score_breakdown": [\n')
    team_attributes = DataModel.getTeamAttributesInOrder(session, name, comp)
    for attribute in team_attributes:
        attr_def = attr_definitions.get_definition( attribute.attr_name )
        if attr_def:
            # an attribute definition without an explicit statistic type is a Total
            try:
                stat_type = attr_def['Statistic_Type']
            except KeyError:
                stat_type = 'Total'
            weight = int(float(attr_def['Weight']))
            if weight != 0:
                # BUGFIX: guard against a zero occurrence count so a team with
                # no scored matches doesn't raise ZeroDivisionError
                if stat_type == 'Average' and attribute.num_occurs:
                    value = int(attribute.cumulative_value/attribute.num_occurs)
                else:
                    value = int(attribute.cumulative_value)
                data_str = '{"attr_name": "%s", "raw_score": %d, "weighted_score": %d}' % (attribute.attr_name,int(value),int(weight*value))
                result.append(data_str)
                result.append(',\n')
    # BUGFIX: only trim when a trailing comma was actually emitted; keying the
    # trim on len(team_attributes) stripped the opening header whenever every
    # attribute had a zero weight
    if result[-1] == ',\n':
        result = result[:-1]
    result.append('\n')
    result.append(']}')
    json_str = ''.join(result)
    if store_json_file is True:
        try:
            FileSync.put( global_config, '%s/EventData/TeamData/team%s_scouting_scorebreakdown.json' % (comp,name), 'text', json_str)
        except:
            raise
    session.remove()
    return json_str
def get_team_scouting_mediafiles_json(global_config, comp, name, store_json_file=False):
    '''List the media files (pictures/videos) and their generated thumbnails
    for one team at a competition as a JSON payload.
    '''
    global_config['logger'].debug( 'GET Team %s Scouting Mediafiles For Competition %s', name, comp )
    result = []
    result.append('{ "competition" : "%s", "team" : "%s",\n' % (comp,name))
    result.append(' "scouting_mediafiles" : [\n')
    input_dir = './static/data/' + comp + '/ScoutingPictures/'
    # BUGFIX: group the extension alternation and escape the dot.  The original
    # pattern '...*.jpg|mp4' applied '|' to the whole expression, so it also
    # matched any name containing 'mp4' and treated '.' as a wildcard.
    pattern = 'Team' + name + '_' + r'[a-zA-Z0-9_]*\.(?:jpg|mp4)'
    mediafiles = get_datafiles(input_dir, re.compile(pattern), False, global_config['logger'])
    for filename in mediafiles:
        segments = filename.split('/')
        basefile = segments[-1]
        result.append(' { "filename": "%s" }' % (basefile))
        result.append(',\n')
    # drop the trailing comma separator
    if len(mediafiles) > 0:
        result = result[:-1]
    result.append(' ],\n')
    result.append(' "thumbnailfiles" : [\n')
    ImageFileUtils.create_thumbnails(mediafiles)
    thumbnail_dir = input_dir + "Thumbnails/"
    # thumbnails are prefixed with their pixel dimensions, e.g. 160x120_Team...
    pattern = '[0-9]*x[0-9]*_Team' + name + '_' + r'[a-zA-Z0-9_]*\.(?:jpg|mp4)'
    thumbnailfiles = get_datafiles(thumbnail_dir, re.compile(pattern), False, global_config['logger'])
    for filename in thumbnailfiles:
        segments = filename.split('/')
        basefile = segments[-1]
        result.append(' { "filename": "%s" }' % (basefile))
        result.append(',\n')
    if len(thumbnailfiles) > 0:
        result = result[:-1]
    result.append(' ] }\n')
    json_str = ''.join(result)
    if store_json_file is True:
        try:
            FileSync.put( global_config, '%s/EventData/TeamData/team%s_scouting_mediafiles.json' % (comp,name), 'text', json_str)
        except:
            raise
    return json_str
def get_event_rank_list_json(global_config, year, event_code):
    '''Return the event rank list (rank + team number per row) as JSON,
    preferring live TBA data and falling back to the cached ranklist.json.
    '''
    global_config['logger'].debug( 'GET Event Rank List Json' )
    # derive our competition name from the FIRST event code
    competition = WebCommonUtils.map_event_code_to_comp(year+event_code)
    store_data_to_file = False
    result = []
    rankings_data = get_event_data_from_tba( '%s%s/rankings' % (year,event_code.lower()) )
    result.append('{ "event" : "%s",\n' % (event_code.lower()))
    # BUGFIX: guard against a failed TBA request returning no payload at all
    rankings = rankings_data.get('rankings', []) if rankings_data else []
    if len(rankings):
        result.append(' "last_updated": "%s",\n' % time.strftime('%c'))
        result.append(' "rankings" : [\n')
        for team_rank in rankings:
            result.append(' { "rank": %d, "team_number": %s, "status": "available" }' % (team_rank['rank'],team_rank['team_key'].replace('frc','')))
            result.append(',\n')
        # BUGFIX: trim the trailing comma whenever any row was emitted; the
        # original '> 1' test produced invalid JSON for a one-team list
        if len(rankings) > 0:
            result = result[:-1]
        result.append(' ]\n')
        store_data_to_file = True
    else:
        # we were not able to retrieve the data from FIRST, so let's return any
        # stored file with the information, otherwise we will return an empty
        # json payload
        stored_file_data = FileSync.get( global_config, '%s/EventData/ranklist.json' % (competition) )
        if stored_file_data != '':
            return stored_file_data
        else:
            # no stored data either, so let's just return a formatted, but empty payload
            result.append(' "last_updated": "%s",\n' % time.strftime('%c'))
            result.append(' "rankings" : []\n')
    result.append(' }\n')
    json_str = ''.join(result)
    if store_data_to_file:
        try:
            FileSync.put( global_config, '%s/EventData/ranklist.json' % (competition), 'text', json_str)
        except:
            raise
    return json_str
def get_team_list_json(global_config, season, event, store_json_file=False):
    '''Return the team list for an event as JSON (sourced from The Blue
    Alliance), optionally caching it under <comp>/EventData/teams.json.
    '''
    global_config['logger'].debug( 'GET Team List For Competition %s', event )
    comp = WebCommonUtils.map_event_code_to_comp(event, season)
    # the session is opened to keep side effects identical to the original
    # implementation, but the team list itself now comes straight from TBA;
    # the old dead (commented-out) database-driven implementation was removed
    session = DbSession.open_db_session(global_config['db_name'] + season)
    json_str = get_team_list_json_from_tba(global_config, comp)
    if store_json_file is True:
        try:
            FileSync.put( global_config, '%s/EventData/%s.json' % (comp,'teams'), 'text', json_str)
        except:
            raise
    session.remove()
    return json_str
def get_team_datafile_json(global_config, filename, store_json_file=False):
    '''Render one team scouting data file ("Name:Value" lines) as JSON.

    filename is "<comp>/<fname>"; fname is expected to look like
    Team<number>_... .  Lines without a ':' separator are skipped.
    '''
    global_config['logger'].debug( 'GET Team Data File Json: %s', filename )
    comp, fname = filename.split('/', 1)
    filepath = './static/data/' + comp + '/ScoutingData/' + fname
    team = fname.split('_')[0].lstrip('Team')

    result = []
    result.append('{ "competition": "%s",\n' % comp)
    result.append(' "team": "%s",\n' % team)
    result.append(' "filename": "%s",\n' % fname)
    result.append(' "scouting_data": [\n')

    entries = []
    # BUGFIX: use a context manager so the file is always closed (the original
    # left it open), and skip malformed lines instead of re-emitting the
    # previous name/value pair (or raising NameError on the first line)
    with open( filepath, 'r' ) as datafile:
        for line in datafile:
            line = line.rstrip('\n')
            try:
                name, value = line.split(':',1)
            except ValueError:
                continue
            entries.append(' { "name": "%s", "value": "%s" }' % (name,value))
    # join with comma separators so the array is valid regardless of how the
    # file was chunked (the original trim logic never ran after the read loop)
    result.append(',\n'.join(entries))
    result.append('] }\n')
    json_str = ''.join(result)

    if store_json_file is True:
        try:
            short_fname = fname.replace('.txt','')
            FileSync.put( global_config, '%s/EventData/TeamData/team%s_scouting_file_%s.json' % (comp,team,short_fname), 'text', json_str)
        except:
            raise
    return json_str
def create_picklist_json(global_config, comp=None, store_json_file=False):
    '''Build the team picklist JSON payload in rank order and cache the
    ranked list in the module-level local_picklist for later reordering.

    comp defaults to the currently configured competition.
    '''
    global_config['logger'].debug( 'Create Picklist Json' )
    global local_picklist
    if comp is None:
        comp = global_config['this_competition'] + global_config['this_season']
        season = global_config['this_season']
    else:
        season = WebCommonUtils.map_comp_to_season(comp)
    session = DbSession.open_db_session(global_config['db_name'] + season)
    result = []
    result.append('{ "picklist": [\n')
    local_picklist = DataModel.getTeamsInRankOrder(session, comp, True)
    rank = 1
    for team in local_picklist:
        # round the score to an integer value; zero-score teams are skipped
        team.score = float(int(team.score))
        if team.score > 0:
            row = '{ "rank" : %d, "team" : %d, "score" : %d, "competition" : "%s" }' % (rank, team.team, int(team.score), team.competition)
            result.append(row)
            result.append(',\n')
            rank += 1
    # BUGFIX: only trim the trailing comma when at least one row was added;
    # the original 'len(result) > 0' test was always true (the header is in
    # the list) and stripped the opening header when the picklist was empty
    if len(result) > 1:
        result = result[:-1]
    result.append(']}')
    json_str = ''.join(result)
    if store_json_file is True:
        try:
            FileSync.put( global_config, '%s/EventData/%s.json' % (comp,'picklist'), 'text', json_str)
        except:
            raise
    session.remove()
    return json_str
def get_saved_filter_json( global_config, filter_name, store_data_to_file=False ):
    '''Return the saved attribute filter(s) as JSON.

    With a filter_name, emits that single filter (with an empty filter
    string when the name is unknown); with filter_name None, emits every
    saved filter.
    '''
    global saved_filters
    competition = global_config['this_competition'] + global_config['this_season']
    # lazily load the saved filters from the config file on first use
    if len(saved_filters) == 0:
        ConfigUtils.read_config( saved_filters, './config/savedfilters.txt' )
    result = []
    result.append('{ "filters": [\n')
    if filter_name != None:
        # BUGFIX: pre-initialize filter_str so an unknown filter name yields
        # an empty filter string instead of raising NameError below
        filter_str = ''
        try:
            filter_name = filter_name.title()
            filter_str = saved_filters[filter_name]
        except:
            pass
        result.append(' { "name": "%s", "filter_str": "%s" }\n' % (filter_name,filter_str))
    else:
        for name, filter_str in saved_filters.iteritems():
            result.append(' { "name": "%s", "filter_str": "%s" }' % (name,filter_str))
            result.append(',\n')
        # drop the trailing comma separator
        if len(saved_filters) > 0:
            result = result[:-1]
    result.append('] }\n')
    json_str = ''.join(result)
    if store_data_to_file:
        try:
            if filter_name == None:
                file_name = 'attrfilters.json'
            else:
                file_name = 'attrfilter_%s.json' % filter_name
            FileSync.put( global_config, '%s/EventData/%s' % (competition,file_name), 'text', json_str)
        except:
            raise
    return json_str
def update_picklist_json(global_config, from_position, to_position, comp=None, store_json_file=True):
    '''Move one entry of the cached picklist (1-based from_position to
    1-based to_position) and regenerate/persist the picklist JSON.
    '''
    global_config['logger'].debug( 'Create Picklist Json' )
    global local_picklist
    if local_picklist is None:
        # build and cache the picklist on first use
        create_picklist_json(global_config, comp, store_json_file=True)
    result = []
    result.append('{ "picklist": [\n')
    if comp is None:
        comp = global_config['this_competition'] + global_config['this_season']
    # positions are 1-based; pop the entry and reinsert it at its new slot
    item_to_update = local_picklist.pop( from_position-1 )
    local_picklist.insert(to_position-1, item_to_update)
    rank = 1
    for team in local_picklist:
        # round the score to an integer value; zero-score teams are skipped
        team.score = float(int(team.score))
        if team.score > 0:
            row = '{ "rank" : %d, "team" : %d, "score" : %d, "competition" : "%s" }' % (rank, team.team, int(team.score), team.competition)
            result.append(row)
            result.append(',\n')
            rank += 1
    # BUGFIX: only trim the trailing comma when at least one row was added;
    # the original 'len(result) > 0' test was always true (the header is in
    # the list) and stripped the opening header when the picklist was empty
    if len(result) > 1:
        result = result[:-1]
    result.append(']}')
    json_str = ''.join(result)
    if store_json_file is True:
        try:
            FileSync.put( global_config, '%s/EventData/%s.json' % (comp,'picklist'), 'text', json_str)
        except:
            raise
    return json_str
def get_team_scouting_datafiles_json(global_config, comp, name, store_json_file=False):
    '''List the scouting data files recorded for one team at a competition
    as a JSON payload, optionally caching it (plus a per-file JSON
    rendering) via FileSync.
    '''
    global_config['logger'].debug( 'GET Team %s Scouting Datafiles For Competition %s', name, comp )

    fragments = [ '{ "competition" : "%s", "team" : "%s",\n' % (comp,name),
                  ' "scouting_datafiles" : [\n' ]

    input_dir = './static/data/' + comp + '/ScoutingData/'
    pattern = 'Team' + name + '_' + '[a-zA-Z0-9_]*.txt'
    datafiles = get_datafiles(input_dir, re.compile(pattern), False, global_config['logger'])

    for filepath in datafiles:
        basefile = filepath.split('/')[-1]
        fragments.append(' { "filename": "%s" }' % (basefile))
        fragments.append(',\n')
        if store_json_file is True:
            # also generate and cache the JSON rendering of each data file
            get_team_datafile_json( global_config, comp + '/' + basefile, store_json_file )

    # drop the trailing comma separator so the array stays well formed
    if len(datafiles) > 0:
        fragments = fragments[:-1]
    fragments.append(' ] }\n')
    json_str = ''.join(fragments)

    if store_json_file is True:
        try:
            FileSync.put( global_config, '%s/EventData/TeamData/team%s_scouting_datafiles.json' % (comp,name), 'text', json_str)
        except:
            raise
    return json_str
def get_event_standings_json(global_config, year, event_code):
    '''Build the event rankings table (datatables-style columns + rows) as
    JSON, preferring live TBA data and falling back to the cached
    rankings.json file for the competition.
    '''
    global_config['logger'].debug( 'GET Event Rankings Json' )
    # derive our competition name from the FIRST event code
    competition = WebCommonUtils.map_event_code_to_comp(year+event_code)
    store_data_to_file = False
    result = []
    tba_data = get_event_data_from_tba( '%s%s/rankings' % (year,event_code.lower()) )
    result.append('{ "event" : "%s",\n' % (event_code.lower()))
    if tba_data:
        rankings = tba_data.get('rankings')
        if rankings is not None:
            result.append(' "last_updated": "%s",\n' % time.strftime('%c'))
            headings = [ 'Rank', 'Team', 'Record', 'Matches_Played', 'Dq' ]
            result.append(' "columns" : [\n')
            for heading in headings:
                result.append(' { "sTitle": "%s" }' % heading)
                result.append(',\n')
            if len(headings)>0:
                result = result[:-1]
            result.append(' ],\n')
            result.append(' "rankings" : [\n')
            for line in rankings:
                result.append(' [ ')
                for item in headings:
                    key = item.lower()
                    if key == 'record':
                        # the W-L-T record is nested in its own dict
                        result.append('"%s-%s-%s"' % (str(line[key]['wins']),str(line[key]['losses']),str(line[key]['ties'])))
                    elif key == 'team':
                        # emit the team number as a link to the team data page
                        team_str = line['team_key'].replace('frc','')
                        result.append(('"<a href=\\"/teamdata/%s/'% competition)+team_str+'\\">'+team_str+'</a>"')
                    else:
                        result.append('"%s"' % (str(line[key])))
                    result.append(', ')
                # drop the trailing ', ' inside the row
                if len(line) > 0:
                    result = result[:-1]
                result.append(' ],\n')
            # BUGFIX: trim the trailing comma whenever at least one row was
            # emitted; the original '> 1' test left invalid JSON for a
            # single-row table
            if len(rankings) > 0:
                result = result[:-1]
                result.append(' ]\n')
            store_data_to_file = True
            # close the "rankings" array
            result.append(' ]\n')
    else:
        # we were not able to retrieve the data from FIRST, so let's return any
        # stored file with the information, otherwise we will return an empty
        # json payload
        stored_file_data = FileSync.get( global_config, '%s/EventData/rankings.json' % (competition) )
        if stored_file_data != '':
            return stored_file_data
        else:
            # no stored data either, so let's just return a formatted, but empty payload
            result.append(' "last_updated": "%s",\n' % time.strftime('%c'))
            result.append(' "columns" : [],\n')
            result.append(' "rankings" : []\n')
    result.append(' }\n')
    json_str = ''.join(result)
    if store_data_to_file:
        try:
            FileSync.put( global_config, '%s/EventData/rankings.json' % (competition), 'text', json_str)
        except:
            raise
    return json_str
result.append(' ]\n') store_data_to_file = True except Exception, err: print 'Caught exception:', err except: # we were not able to retrieve the data from FIRST, so let's return any stored file with the # information, otherwise we will return an empty json payload stored_file_data = FileSync.get( global_config, '%s/EventData/%s%s.json' % (competition,query_str,round_str) ) if stored_file_data != '': return stored_file_data result.append(' ] }\n') json_str = ''.join(result) if store_data_to_file: try: FileSync.put( global_config, '%s/EventData/%s%s.json' % (competition,query_str,round_str), 'text', json_str) except: raise return json_str def update_event_data_files( global_config, year, event, directory ): result = False event_code = CompAlias.get_eventcode_by_alias(event) my_team = global_config['my_team'] # for now, we only support updating files in the EventData directory, so only continue if that's the # directory that was specified. if directory.upper() == 'EVENTDATA':
def get_team_attr_rankings_json(global_config, comp=None, attr_name=None):
    '''Build a datatables-style JSON payload ranking all teams by a single
    scouting attribute.  For Map_Integer attributes one column is emitted
    per mapped value.
    '''
    global_config['logger'].debug( 'GET Team Attribute Rankings Json' )

    # NOTE(review): this flag is never set to True anywhere in this function,
    # so the FileSync.put() near the end is currently unreachable -- confirm
    # whether caching was disabled deliberately
    store_data_to_file = False

    if comp == None:
        comp = global_config['this_competition'] + global_config['this_season']
        season = global_config['this_season']
    else:
        season = WebCommonUtils.map_comp_to_season(comp)
    session = DbSession.open_db_session(global_config['db_name'] + season)

    attrdef_filename = WebCommonUtils.get_attrdef_filename(comp=comp)
    attr_definitions = AttributeDefinitions.AttrDefinitions()
    attr_definitions.parse(attrdef_filename)
    attr_def = attr_definitions.get_definition(attr_name)
    try:
        stat_type = attr_def['Statistic_Type']
    except:
        stat_type = 'Total'

    web.header('Content-Type', 'application/json')
    payload = []
    payload.append('{ "attr_name" : "%s",\n' % attr_name)

    # add the columns based on the attribute definition type
    payload.append(' "columns" : [\n')
    payload.append(' { "sTitle": "Team" }')
    payload.append(',\n')
    columns = []
    if attr_def['Type'] == 'Map_Integer':
        # one extra column per mapped value, e.g. "Low=1:High=2" -> Low, High
        for map_value in attr_def['Map_Values'].split(':'):
            item_name = map_value.split('=')[0]
            columns.append(item_name)
            payload.append(' { "sTitle": "%s" }' % item_name)
            payload.append(',\n')
    # drop the trailing comma separator after the last column
    payload = payload[:-1]
    payload.append('\n')
    payload.append(' ],\n')

    if stat_type == 'Average':
        team_rankings = DataModel.getTeamAttributesInAverageRankOrder(session, comp, attr_name)
    else:
        team_rankings = DataModel.getTeamAttributesInRankOrder(session, comp, attr_name)

    payload.append(' "rankings" : [\n')
    for team_attr in team_rankings:
        row = ' [ %d,' % team_attr.team
        value_dict = DataModel.mapAllValuesToDict(attr_def, team_attr.all_values)
        for column in columns:
            try:
                cell = value_dict[column]
            except:
                cell = 0
            row += ' %d,' % cell
        row = row.rstrip(',')
        row += ' ]'
        payload.append(row)
        payload.append(',\n')
    if len(team_rankings) > 0:
        payload = payload[:-1]
    payload.append('\n')
    payload.append(' ]\n}')
    json_str = ''.join(payload)

    if store_data_to_file is True:
        try:
            file_name = 'attrrankings_%s' % attr_name
            FileSync.put( global_config, '%s/EventData/%s.json' % (comp,file_name), 'text', json_str)
        except:
            raise
    session.remove()
    return json_str
def get_team_rankings_json(global_config, season, event, attr_filters=None, filter_name=None, thumbnails = False, store_json_file=False):
    '''Build the team rankings JSON payload for an event.

    With no attribute filters the overall stored team score is used; with
    filters, a composite score is computed from the named attributes
    (optionally restricted to a specific attribute value).

    attr_filters    -- list of 'attr' or 'attr=value' strings (default: none)
    filter_name     -- names the cached json file when store_json_file is True
    thumbnails      -- when True, embed team thumbnail snippets in each row
    store_json_file -- when True, persist the payload via FileSync
    '''
    global_config['logger'].debug( 'GET Team Rankings Json' )
    # BUGFIX: default to None instead of a mutable [] so one list instance is
    # not shared across calls
    if attr_filters is None:
        attr_filters = []
    comp = WebCommonUtils.map_event_code_to_comp(event, season)
    session = DbSession.open_db_session(global_config['db_name'] + season)
    result = []
    result.append('{ "rankings": [\n')
    rank_added = False
    if len(attr_filters) == 0:
        team_rankings = DataModel.getTeamsInRankOrder(session, comp, False)
        for team in team_rankings:
            # round the score to an integer value; zero-score teams are skipped
            team.score = float(int(team.score))
            if team.score > 0:
                thumbnails_snippet = ''
                if thumbnails:
                    thumbnails_snippet = ',\n' + get_team_scouting_thumbnails_json_snippet(global_config, comp, str(team.team))
                result.append( ' { "score": %0.1f, "competition": "%s", "team": %d%s }' % (team.score, comp, team.team,thumbnails_snippet))
                result.append(',\n')
                rank_added = True
    else:
        # we'll need the attribute definitions in order to retrieve the correct
        # attribute value and attribute weighting
        attrdef_filename = WebCommonUtils.get_attrdef_filename(comp=comp)
        attr_definitions = AttributeDefinitions.AttrDefinitions(global_config)
        attr_definitions.parse(attrdef_filename)
        team_rank_dict = dict()
        for attr_filter in attr_filters:
            # filters are either 'attr_name' or 'attr_name=value'
            try:
                attr_name, attr_value = attr_filter.split('=')
            except:
                attr_name = attr_filter
                attr_value = None
            attr_def = attr_definitions.get_definition(attr_name)
            if attr_value is None:
                team_rankings = DataModel.getTeamAttributesInRankOrder(session, comp, attr_name, False)
                for team in team_rankings:
                    try:
                        stat_type = attr_def['Statistic_Type']
                    except:
                        stat_type = 'Total'
                    weight = int(float(attr_def['Weight']))
                    if stat_type == 'Average':
                        # BUGFIX: guard against a zero occurrence count to
                        # avoid ZeroDivisionError for unscored teams
                        if team.num_occurs:
                            score = int(team.cumulative_value/team.num_occurs*weight)
                        else:
                            score = 0
                    else:
                        score = int(team.cumulative_value*weight)
                    try:
                        team_rank_dict[team.team] += score
                    except:
                        team_rank_dict[team.team] = score
            else:
                # count how many times the team recorded the requested value
                team_rankings = DataModel.getTeamAttributesWithValue(session, comp, attr_name, attr_value, False)
                for team in team_rankings:
                    score = team.all_values.count(attr_value)
                    try:
                        team_rank_dict[team.team] += score
                    except:
                        team_rank_dict[team.team] = score
        sorted_team_rank = sorted(team_rank_dict.items(), key=operator.itemgetter(1))
        for team, score in sorted_team_rank:
            # round the score to an integer value; zero-score teams are skipped
            score = float(int(score))
            if score > 0:
                thumbnails_snippet = ''
                if thumbnails:
                    thumbnails_snippet = ',\n' + get_team_scouting_thumbnails_json_snippet(global_config, comp, str(team))
                result.append( ' { "score": %0.1f, "competition": "%s", "team": %d%s }' % (score, comp, team, thumbnails_snippet))
                result.append(',\n')
                rank_added = True
    # trim the trailing comma only when at least one row was emitted
    if rank_added == True:
        result = result[:-1]
    result.append(']}')
    json_str = ''.join(result)
    if store_json_file is True:
        try:
            if filter_name is None:
                file_name = 'scoutingrankings'
            else:
                file_name = 'scoutingrankings_%s' % filter_name
            FileSync.put( global_config, '%s/EventData/%s.json' % (comp,file_name), 'text', json_str)
        except:
            raise
    session.remove()
    return json_str
def get_attr_tree_json(global_config, filter_name = None, store_data_to_file=False):
    '''Build the attribute-definitions checkbox tree ("item" JSON) grouped
    by category, optionally applying a saved filter to set the checked
    state of each attribute, and optionally caching the result.
    '''
    global_config['logger'].debug( 'GET Attribute Definitions Tree JSON' )
    attrdef_filename = './config/' + global_config['attr_definitions']
    attr_definitions = AttributeDefinitions.AttrDefinitions(global_config)
    attr_definitions.parse(attrdef_filename)
    competition = global_config['this_competition'] + global_config['this_season']
    attr_filter = get_saved_filter(filter_name)

    result = []
    result.append('{ "item": [\n')
    # with no saved filter everything defaults to checked
    if filter_name != None:
        checked = 0
    else:
        checked = 1
    opened = 1
    result.append(' { "text": "%s", "open": %d, "checked": %d, "id": "Skip_%s", "item": [ \n' % ('All Attributes',opened,checked,'All'))
    category_dict = attr_definitions.get_attr_dict_by_category()
    for category, attrlist in category_dict.iteritems():
        if category != 'Uncategorized':
            result.append(' { "text": "%s", "checked": %d, "id": "Skip_%s", "item": [ \n' % (category,checked,category))
            for attrname in sorted(attrlist):
                checked_ind = 0
                if filter_name is None:
                    # if there is no specified filter, then set the checked
                    # indicator based on the overall setting
                    checked_ind = checked
                else:
                    # otherwise, set the checked indicator based on whether the
                    # attribute name is specified in the filter list
                    if attrname in attr_filter:
                        checked_ind = 1
                tree_item_str = get_attr_def_item_json( global_config, attr_definitions.get_definition(attrname), attr_filter, checked_ind )
                result.append(tree_item_str)
                result.append(',\n')
            if len(attrlist) > 0:
                result = result[:-1]
                result.append('\n')
            result.append( ' ] }')
            result.append(',\n')
    # BUGFIX: use .get() so the tree still renders when every attribute has a
    # category; the original hard lookup raised KeyError in that case
    attrlist = category_dict.get('Uncategorized', [])
    if len( attrlist ) > 0:
        for attrname in sorted(attrlist):
            checked_ind = 0
            if filter_name is None:
                checked_ind = checked
            else:
                if attrname in attr_filter:
                    checked_ind = 1
            tree_item_str = get_attr_def_item_json( global_config, attr_definitions.get_definition(attrname), attr_filter, checked_ind )
            result.append(tree_item_str)
            result.append(',\n')
    # drop the trailing comma left by the last category or uncategorized item
    result = result[:-1]
    result.append('],\n "id": 1 \n}\n')
    result.append('],\n "id": 0 \n}\n')
    json_str = ''.join(result)
    if store_data_to_file:
        try:
            if filter_name == None:
                file_name = 'attrtree.json'
            else:
                file_name = 'attrtree_%s.json' % filter_name
            FileSync.put( global_config, '%s/EventData/%s' % (competition,file_name), 'text', json_str)
        except:
            raise
    return json_str
def PUT(self, request_path):
    '''web.py handler: persist the raw uploaded request body to the
    file-sync store at the given path, preserving its content type.'''
    FileSync.put(global_config, request_path, web.ctx.env['CONTENT_TYPE'], web.data())
    return
filename += "_Events" events = session.query(DataModel.EventInfo).all() event_geo_info = {} team_geo_info = {} for event in events: if event.geo_location is not None: try: event_year_info = event_geo_info[event.event_year] geo_locations[event.event_key] = handle_geo_overlap(event_year_info, event.geo_location, True) except: event_geo_info[event.event_year] = {} event_geo_info[event.event_year][event.geo_location] = event.geo_location geo_locations[event.event_key] = json.loads(event.geo_location) filename += "_Teams" teams = session.query(DataModel.TeamInfo).all() for team in teams: if team.geo_location is not None: team_key = "frc%d" % team.team geo_locations[team_key] = handle_geo_overlap(team_geo_info, team.geo_location, False) geo_location_json = json.dumps(geo_locations) geo_location_js = "var %s = '%s';" % (filename, geo_location_json) # store the geo location information to a file, too FileSync.put({}, "GlobalData/%s.json" % (filename), "text", geo_location_json) FileSync.put({}, "GlobalData/%s.js" % (filename), "text", geo_location_js)
def get_event_matchschedule_json(global_config, year, event_code, team_str = None):
    '''Build the match schedule for an event (optionally restricted to one
    team) as JSON, grouped by competition round.

    Falls back to the cached matchschedule json file when TBA returns no
    match data.  (The unused 'result' accumulator was removed; the payload
    is built with json.dumps.)
    '''
    global_config['logger'].debug( 'GET Event Schedule Json' )
    # derive our competition name from the FIRST event code
    competition = WebCommonUtils.map_event_code_to_comp(year+event_code)
    store_data_to_file = False
    if team_str is None:
        exp_filename = ''
        event_matches = get_event_data_from_tba( '%s%s/matches' % (year,event_code.lower()) )
    else:
        exp_filename = '_%s' % team_str
        event_matches = WebTeamData.get_team_data_from_tba( team_str, 'event/%s%s/matches' % (year,event_code.lower()) )
    match_schedule = dict()
    match_schedule['event'] = event_code.lower()
    match_schedule['columns'] = [ 'Round', 'Match', 'Red_1', 'Red_2', 'Red_3', 'Blue_1', 'Blue_2', 'Blue_3' ]
    match_schedule['qualification'] = []
    match_schedule['quarter_finals'] = []
    match_schedule['semi_finals'] = []
    match_schedule['finals'] = []
    if len(event_matches):
        # the entire match set is returned from TBA; sort each match into the
        # list for its competition round
        for match in event_matches:
            comp_level = match['comp_level']
            if comp_level in ('qf', 'sf'):
                # elimination matches are identified by set and match number
                match_str = '%s-%s' % (match['set_number'],match['match_number'])
            else:
                match_str = str(match['match_number'])
            match_entry = [ comp_level, match_str,
                            match['alliances']['red']['team_keys'][0].lstrip('frc'),
                            match['alliances']['red']['team_keys'][1].lstrip('frc'),
                            match['alliances']['red']['team_keys'][2].lstrip('frc'),
                            match['alliances']['blue']['team_keys'][0].lstrip('frc'),
                            match['alliances']['blue']['team_keys'][1].lstrip('frc'),
                            match['alliances']['blue']['team_keys'][2].lstrip('frc') ]
            if comp_level == 'qm':
                match_schedule['qualification'].append(match_entry)
            elif comp_level == 'qf':
                match_schedule['quarter_finals'].append(match_entry)
            elif comp_level == 'sf':
                match_schedule['semi_finals'].append(match_entry)
            elif comp_level == 'f':
                match_schedule['finals'].append(match_entry)
        store_data_to_file = True
        # the qualification match schedule needs to be sorted, the sort will be
        # done by the second element of each row, which is the match number
        match_schedule['qualification'].sort(key=lambda match_list: int(match_list[1]))
    else:
        # we were not able to retrieve the data from FIRST, so let's return any
        # stored file with the information, otherwise we will return an empty
        # json payload
        stored_file_data = FileSync.get( global_config, '%s/EventData/matchschedule%s.json' % (competition,exp_filename) )
        if stored_file_data != '':
            return stored_file_data
    json_str = json.dumps(match_schedule)
    if store_data_to_file:
        try:
            FileSync.put( global_config, '%s/EventData/matchschedule%s.json' % (competition,exp_filename), 'text', json_str)
        except:
            raise
    return json_str
def get_event_matchresults_json(global_config, year, event_code, round_str, team_str = None):
    """Return a JSON string of match results for one round of an event.

    Results come from The Blue Alliance (TBA). On success the payload is
    cached via FileSync; if TBA returns no matches, any previously cached
    copy is returned instead.

    Params:
        global_config - app-wide config dict; must contain 'logger'; the
                        optional 'json_no_links' == 'Yes' setting suppresses
                        HTML hyperlinks in the output
        year          - season year string, e.g. '2018'
        event_code    - FIRST event code, e.g. 'MIKET'
        round_str     - one of 'qual', 'quarters', 'semis', 'finals'
        team_str      - optional team number; restricts results to that team

    Raises:
        ValueError if round_str is not a recognized round name (the original
        code left match_selector unassigned, causing a NameError later).
    """
    global_config['logger'].debug( 'GET Event Results Json' )

    # map the web-facing round name to TBA's comp_level code
    round_selector_map = { 'qual': 'qm', 'quarters': 'qf', 'semis': 'sf', 'finals': 'f' }
    try:
        match_selector = round_selector_map[round_str]
    except KeyError:
        raise ValueError('Unrecognized round name: %s' % round_str)

    # derive our competition name from the FIRST event code
    competition = WebCommonUtils.map_event_code_to_comp(year+event_code)

    store_data_to_file = False
    result = []
    result.append('{ "event" : "%s",\n' % (event_code.lower()))

    if team_str is None:
        exp_filename = ''
        event_matches = get_event_data_from_tba( '%s%s/matches' % (year,event_code.lower()), api_version='v3' )
    else:
        exp_filename = '_%s' % team_str
        event_matches = WebTeamData.get_team_data_from_tba( team_str, 'event/%s%s/matches' % (year,event_code.lower()) )

    # hyperlinks are emitted unless the config explicitly disables them; the
    # original implementation used a bare raise/except pair for this test
    no_links = global_config.get('json_no_links') == 'Yes'

    if len(event_matches):
        result.append(' "last_updated": "%s",\n' % time.strftime('%c'))

        headings = [ 'Match', 'Start Time', 'Red 1', 'Red 2', 'Red 3', 'Blue 1', 'Blue 2', 'Blue 3', 'Red Score', 'Blue Score' ]
        result.append(' "columns" : [\n')
        for heading in headings:
            result.append(' { "sTitle": "%s" }' % heading)
            result.append(',\n')
        if len(headings)>0:
            # drop the trailing ',' after the last column entry
            result = result[:-1]
        result.append(' ],\n')

        result.append(' "matchresults" : [\n')

        # the entire match set is returned from TBA, filter out the matches
        # for the desired round
        for match in event_matches:
            if str(match['comp_level']) == match_selector:
                result.append(' [ ')

                # Match number, optionally hyperlinked to the match page
                if no_links:
                    result.append( '"%s", ' % str(match['match_number']) )
                else:
                    result.append( '"%s", ' % get_match_hyperlink(competition, match) )

                # Match start time (TBA supplies an epoch timestamp)
                match_epoch_time = int(match['time'])
                time_format_str = '%a %b %d - %I:%M %p'
                match_time_str = datetime.datetime.fromtimestamp(match_epoch_time).strftime(time_format_str)
                result.append( '"%s", ' % match_time_str )

                # Red then blue alliance team numbers ('frc' prefix stripped;
                # safe because the remainder is all digits)
                for alliance in ('red', 'blue'):
                    team_keys = match['alliances'][alliance]['team_keys']
                    for idx in range(3):
                        team_number = str(team_keys[idx]).lstrip('frc')
                        if no_links:
                            result.append( '"%s", ' % team_number )
                        else:
                            result.append( '"%s", ' % get_team_hyperlink( competition, team_number ) )

                # Alliance scores; TBA reports -1 for matches not yet played
                score = str(match['alliances']['red']['score'])
                if score == '-1':
                    score = '-'
                result.append( '"%s", ' % score )
                score = str(match['alliances']['blue']['score'])
                if score == '-1':
                    score = '-'
                result.append( '"%s" ' % score )

                result.append(' ],\n')
                store_data_to_file = True

        if store_data_to_file is True:
            # replace the trailing ',' on the last row before closing the array
            result = result[:-1]
            result.append(' ]\n')
        result.append(' ]\n')
    else:
        # we were not able to retrieve the data from FIRST, so let's return
        # any stored file with the information, otherwise we will return an
        # empty json payload
        stored_file_data = FileSync.get( global_config, '%s/EventData/matchresults_%s%s.json' % (competition,round_str,exp_filename) )
        if stored_file_data != '':
            return stored_file_data

    result.append(' }\n')
    json_str = ''.join(result)

    if store_data_to_file:
        FileSync.put( global_config, '%s/EventData/matchresults_%s%s.json' % (competition,round_str,exp_filename), 'text', json_str)

    return json_str
def get_team_scouting_data_summary_json(global_config, comp, name, attr_filter=None, filter_name=None, store_json_file=False):
    """Return a JSON string summarizing one team's scouting data attributes.

    Reads the team's attributes from the season database and includes each
    attribute whose definition requests display (Include_In_Team_Display,
    Include_In_Report, or a non-zero Weight), optionally restricted to an
    explicit attribute filter.

    Params:
        global_config   - app-wide config dict; must contain 'logger',
                          'db_name' and 'attr_definitions'
        comp            - competition string, e.g. '2018MIKET'
        name            - team number string
        attr_filter     - optional list of attribute names to include
                          (None/empty means include all eligible attributes);
                          None replaces the original mutable [] default
        filter_name     - unused; kept for interface compatibility
        store_json_file - when True, also cache the payload via FileSync

    Returns None when no attribute definitions are configured.
    """
    global_config['logger'].debug( 'GET Team %s Scouting Data For Competition %s', name, comp )

    # Check this before opening the DB session; the original opened the
    # session first and leaked it on this early return.
    if global_config['attr_definitions'] is None:
        return None

    if attr_filter is None:
        attr_filter = []

    season = WebCommonUtils.map_comp_to_season(comp)
    session = DbSession.open_db_session(global_config['db_name'] + season)
    try:
        attrdef_filename = WebCommonUtils.get_attrdef_filename(comp=comp)
        attr_definitions = AttributeDefinitions.AttrDefinitions(global_config)
        attr_definitions.parse(attrdef_filename)

        result = []
        result.append('{ "competition" : "%s", "team" : "%s",\n' % (comp,name))
        result.append(' "scouting_data_summary" : [\n')

        team_attributes = DataModel.getTeamAttributesInOrder(session, name, comp)
        if len(team_attributes) > 0:
            some_attr_added = False
            for attribute in team_attributes:
                attr_def = attr_definitions.get_definition( attribute.attr_name )
                if not attr_def:
                    continue

                # an attribute is displayed if any of these definition flags
                # request it (has_key replaced by .get(); same truth table)
                include_attr = ( attr_def.get('Include_In_Team_Display') == 'Yes'
                                 or attr_def.get('Include_In_Report') == 'Yes'
                                 or attr_def.get('Weight', '0') != '0' )

                # if an attribute filter has been provided, only include the
                # attribute data if the attribute is in the filter
                if attr_filter and attr_def['Name'] not in attr_filter:
                    include_attr = False

                if include_attr:
                    some_attr_added = True
                    attr_name = attr_def.get('Display_Name', attr_def['Name'])
                    category = attr_def.get('Sub_Category', '')
                    result.append(' { "name": "%s", "matches": "%s", "cumulative_value": "%s", "average_value": "%s", "all_values": "%s", "category": "%s" }' % \
                                  (attr_name,str(attribute.num_occurs),str(attribute.cumulative_value),str(round(attribute.avg_value,1)),\
                                   DataModel.mapAllValuesToShortenedString(attr_def, attribute.all_values), category) )
                    result.append(',\n')
            if some_attr_added:
                # drop the trailing ',' after the final attribute entry
                result = result[:-1]

        result.append(' ] }\n')
        json_str = ''.join(result)

        if store_json_file is True:
            FileSync.put( global_config, '%s/EventData/TeamData/team%s_scouting_data_summary.json' % (comp,name), 'text', json_str)
    finally:
        # release the session even if FileSync.put raises (original leaked it)
        session.remove()

    return json_str
def get_event_stats_json(global_config, year, event_code, stat_type): global_config['logger'].debug( 'GET Event Results Json' ) # derive our competition name from the FIRST event code competition = WebCommonUtils.map_event_code_to_comp(year+event_code) store_data_to_file = False result = [] result.append('{ "event" : "%s",\n' % (event_code.lower())) event_stats = get_event_data_from_tba( '%s%s/oprs' % (year,event_code.lower()) ) if len(event_stats): # rankings is now a list of lists, with the first element of the list being the list of column headings # take the list of columngs and apply to each of the subsequent rows to build the json response result.append(' "last_updated": "%s",\n' % time.strftime('%c')) headings = [ 'Team', stat_type.rstrip('s').upper() ] result.append(' "columns" : [\n') for heading in headings: result.append(' { "sTitle": "%s" }' % heading) result.append(',\n') if len(headings)>0: result = result[:-1] result.append(' ],\n') result.append(' "stats" : [\n') try: stats_dict = event_stats[stat_type] for key, value in stats_dict.iteritems(): result.append( ' ["%s", %.2f' % (get_team_hyperlink( competition, key.lstrip('frc') ),value) ) result.append(' ],\n') store_data_to_file = True if store_data_to_file is True: result = result[:-1] result.append(' ]\n') except: global_config['logger'].debug( 'No Statistics Data For %s' % stat_type ) result.append(' ]\n') else: # we were not able to retrieve the data from FIRST, so let's return any stored file with the # information, otherwise we will return an empty json payload stored_file_data = FileSync.get( global_config, '%s/EventData/eventstats_%s.json' % (competition,stat_type) ) if stored_file_data != '': return stored_file_data result.append(' }\n') json_str = ''.join(result) if store_data_to_file: try: FileSync.put( global_config, '%s/EventData/eventstats_%s.json' % (competition,stat_type), 'text', json_str) except: raise return json_str
def get_event_standings_json(global_config, year, event_code):
    """Return a JSON string of event rankings fetched from The Blue Alliance.

    The payload has 'event', 'last_updated', 'columns' (DataTables sTitle
    entries) and 'rankings' (rows of cell strings). Team-number cells are
    wrapped in hyperlinks to the team data page. On success the payload is
    cached via FileSync; on failure any cached copy is returned, or a
    formatted-but-empty payload if there is none.

    NOTE(review): rankings is indexed as rankings[0] == headings row and
    rankings[1:] == data rows, so get_event_data_from_tba presumably returns
    a list-of-lists here — confirm against that helper.
    """
    global_config['logger'].debug( 'GET Event Rankings Json' )

    # derive our competition name from the FIRST event code
    competition = WebCommonUtils.map_event_code_to_comp(year+event_code)

    #return get_data_from_first(global_config, year, event_code, 'rankings')

    store_data_to_file = False
    result = []
    rankings = get_event_data_from_tba( '%s%s/rankings' % (year,event_code.lower()) )
    result.append('{ "event" : "%s",\n' % (event_code.lower()))
    if rankings:
        # rankings is now a list of lists, with the first element of the list
        # being the list of column headings; take the list of columns and
        # apply to each of the subsequent rows to build the json response
        result.append(' "last_updated": "%s",\n' % time.strftime('%c'))

        headings = rankings[0]
        result.append(' "columns" : [\n')
        for heading in headings:
            result.append(' { "sTitle": "%s" }' % heading)
            result.append(',\n')
        if len(headings)>0:
            # drop the trailing ',' after the last column entry
            result = result[:-1]
        result.append(' ],\n')

        result.append(' "rankings" : [\n')
        for line in rankings[1:]:
            result.append(' [ ')
            for i in range(0,len(headings)):
                if need_team_hyperlink(headings[i]):
                    # team cells link to the team's data page; the \\" pairs
                    # emit escaped quotes inside the JSON string value
                    #result.append('"%s"' % (line[i]))
                    result.append(('"<a href=\\"/teamdata/%s/'% competition)+str(line[i])+'\\">'+str(line[i])+'</a>"')
                else:
                    #result.append('"%s": "%s"' % (headings[i].title(),line[i]))
                    result.append('"%s"' % (str(line[i])))
                result.append(', ')
            if len(line) > 0:
                # drop the trailing ', ' after the last cell of the row
                result = result[:-1]
            result.append(' ],\n')
        if len(rankings) > 1:
            # replace the trailing ',' on the last row before closing, and
            # only cache when at least one data row was emitted
            result = result[:-1]
            result.append(' ]\n')
            store_data_to_file = True
        result.append(' ]\n')
    else:
        # we were not able to retrieve the data from FIRST, so let's return
        # any stored file with the information, otherwise we will return an
        # empty json payload
        stored_file_data = FileSync.get( global_config, '%s/EventData/rankings.json' % (competition) )
        if stored_file_data != '':
            return stored_file_data
        else:
            # no stored data either, so let's just return a formatted, but
            # empty payload
            result.append(' "last_updated": "%s",\n' % time.strftime('%c'))
            result.append(' "columns" : [],\n')
            result.append(' "rankings" : []\n')

    result.append(' }\n')
    json_str = ''.join(result)

    if store_data_to_file:
        try:
            FileSync.put( global_config, '%s/EventData/rankings.json' % (competition), 'text', json_str)
        except:
            raise

    return json_str