def get_team_score_breakdown_json(global_config, name, comp=None, store_json_file=False):
    """Return a JSON string breaking a team's score down per weighted attribute.

    Args:
        global_config: application config dict (provides logger, db_name,
            this_competition/this_season defaults).
        name: team identifier.
        comp: competition string; defaults to the current competition+season.
        store_json_file: when True, also persist the JSON via FileSync.

    Returns:
        JSON string of the form {"score_breakdown": [...]}.
    """
    global_config['logger'].debug( 'GET Team Score Breakdown: %s', name )

    if comp is None:
        comp = global_config['this_competition'] + global_config['this_season']
        season = global_config['this_season']
    else:
        season = WebCommonUtils.map_comp_to_season(comp)

    session = DbSession.open_db_session(global_config['db_name'] + season)

    attrdef_filename = WebCommonUtils.get_attrdef_filename(comp=comp)
    attr_definitions = AttributeDefinitions.AttrDefinitions(global_config)
    attr_definitions.parse(attrdef_filename)

    result = []
    result.append('{ "score_breakdown": [\n')

    team_attributes = DataModel.getTeamAttributesInOrder(session, name, comp)
    for attribute in team_attributes:
        attr_def = attr_definitions.get_definition( attribute.attr_name )
        if attr_def:
            # Default to 'Total' when the definition omits a statistic type.
            try:
                stat_type = attr_def['Statistic_Type']
            except KeyError:
                stat_type = 'Total'
            weight = int(float(attr_def['Weight']))
            # Zero-weight attributes do not contribute to the score breakdown.
            if weight != 0:
                if stat_type == 'Average':
                    value = int(attribute.cumulative_value/attribute.num_occurs)
                else:
                    value = int(attribute.cumulative_value)
                data_str = '{"attr_name": "%s", "raw_score": %d, "weighted_score": %d}' % (attribute.attr_name,int(value),int(weight*value))
                result.append(data_str)
                result.append(',\n')

    # BUG FIX: strip only a trailing ',\n'. The old test
    # 'len(team_attributes) > 0' removed the last list element even when no
    # entries had been appended (all weights zero), which deleted the opening
    # '{ "score_breakdown": [' and produced invalid JSON.
    if result[-1] == ',\n':
        result = result[:-1]
    result.append('\n')
    result.append(']}')

    json_str = ''.join(result)

    if store_json_file is True:
        try:
            FileSync.put( global_config, '%s/EventData/TeamData/team%s_scouting_scorebreakdown.json' % (comp,name), 'text', json_str)
        except:
            raise

    session.remove()
    return json_str
def recalculate_scoring(global_config, competition=None, attr_definitions=None):
    """Recompute and persist the weighted score of every ranked team.

    Args:
        global_config: application config dict.
        competition: competition string; defaults to current competition+season.
        attr_definitions: pre-parsed AttrDefinitions; parsed from the csv
            definition file when not supplied.

    Raises:
        Exception: when no competition can be determined.
    """
    if competition is None:
        competition = global_config['this_competition'] + global_config['this_season']

    # Guard against an unset or empty competition string (idiom fix: use
    # 'is None' rather than '== None').
    if competition is None or competition == '':
        raise Exception( 'Competition Not Specified!')

    # Build the attribute definition dictionary from the definitions csv file
    if global_config['attr_definitions'] is None:
        return

    if attr_definitions is None:
        attrdef_filename = WebCommonUtils.get_attrdef_filename(competition)
        if attrdef_filename is not None:
            attr_definitions = AttributeDefinitions.AttrDefinitions(global_config)
            attr_definitions.parse(attrdef_filename)
        else:
            # No definition file available; nothing to score against.
            return

    session = DbSession.open_db_session(global_config['db_name'] + global_config['this_season'])
    team_rankings = getTeamsInRankOrder(session, competition)
    for team_entry in team_rankings:
        score = calculateTeamScore(session, team_entry.team, competition, attr_definitions)
        setTeamScore(session, team_entry.team, competition, score)
    session.commit()

    dump_database_as_csv_file(session, global_config, attr_definitions, competition)
    session.remove()
def get_team_info_json(global_config, comp, name, store_json_file=False):
    """Return a JSON string with basic team info (nickname, location, etc.).

    Returns an empty string when the team is unknown.
    """
    global_config['logger'].debug( 'GET Team %s Info', name )

    season = WebCommonUtils.map_comp_to_season(comp)
    session = DbSession.open_db_session(global_config['db_name'] + season)

    team_info = DataModel.getTeamInfo(session, int(name))
    if team_info is None:
        json_str = ''
    else:
        # Name/value pairs emitted in a fixed order for the team_data array.
        fields = (
            ('nickname', team_info.nickname),
            ('affiliation', team_info.fullname),
            ('location', team_info.location),
            ('rookie_season', team_info.rookie_season),
            ('website', team_info.website),
        )
        result = []
        result.append('{ "team": "%s", "team_data" : [\n' % name)
        result.append(',\n'.join(' { "name": "%s", "value": "%s" }' % pair for pair in fields))
        result.append('\n')
        result.append(' ] }\n')
        json_str = ''.join(result)

    if store_json_file is True:
        try:
            FileSync.put( global_config, '%s/EventData/TeamData/team%s_teaminfo.json' % (comp,name), 'text', json_str)
        except:
            raise

    # BUG FIX: release the scoped database session; every sibling function
    # in this module (score, notes, breakdown) calls session.remove(), but
    # this one leaked it.
    session.remove()
    return json_str
def process_form(global_config, form):
    """Apply an attribute-value modification submitted from the web form.

    Reads team/competition/attribute fields from the form, applies the
    change through DataModel, and returns a human-readable result string.
    """
    global_config['logger'].debug( 'Process Attribute Modify Form' )

    season = form[attr_modify_season_label].value
    comp = form[attr_modify_comp_label].value
    team = form[attr_modify_team_number_label].value
    attr_name = form[attr_modify_attribute_name_label].value
    old_value = form[attr_modify_old_value_label].value
    new_value = form[attr_modify_new_value_label].value

    # Initialize the database session connection
    db_name = global_config['db_name'] + global_config['this_season']
    session = DbSession.open_db_session(db_name)

    # BUG FIX: initialize attr_def so the call below cannot raise a
    # NameError when no attribute definition file exists for this comp.
    attr_def = None
    attrdef_filename = WebCommonUtils.get_attrdef_filename(short_comp=comp)
    if attrdef_filename is not None:
        attr_definitions = AttributeDefinitions.AttrDefinitions(global_config)
        attr_definitions.parse(attrdef_filename)
        attr_def = attr_definitions.get_definition(attr_name)

    try:
        DataModel.modifyAttributeValue(session, team, comp+season, attr_name, old_value, new_value, attr_def)
        result = 'Attribute %s Modified From %s to %s For Team %s' % (attr_name,old_value,new_value,team)
        session.commit()
    except ValueError as reason:
        # BUG FIX: corrected 'Addribute' typo in the user-facing message.
        result = 'Error Modifying Scouting Attribute %s For Team %s: %s' % (attr_name,team,reason)

    session.remove()
    return result
def get_team_scouting_notes_json(global_config, comp, name, store_json_file=False):
    """Build the JSON payload of scouting notes recorded for one team.

    Optionally persists the payload to the per-team notes file via FileSync.
    """
    global_config['logger'].debug( 'GET Team %s Scouting Notes For Competition %s', name, comp )

    season = WebCommonUtils.map_comp_to_season(comp)
    session = DbSession.open_db_session(global_config['db_name'] + season)

    team_notes = DataModel.getTeamNotes(session, name, comp)
    note_entries = [' { "tag": "%s", "note": "%s" }' % (note.tag,note.data) for note in team_notes]

    pieces = ['{ "competition" : "%s", "team" : "%s",\n' % (comp,name)]
    pieces.append(' "scouting_notes" : [\n')
    pieces.append(',\n'.join(note_entries))
    pieces.append(' ] }\n')
    json_str = ''.join(pieces)

    if store_json_file is True:
        try:
            FileSync.put( global_config, '%s/EventData/TeamData/team%s_scouting_notes.json' % (comp,name), 'text', json_str)
        except:
            raise

    session.remove()
    return json_str
def get_data_from_first(global_config, year, event_code, query_str, round_str = '', table_to_parse=2):
    # Fetch ranking data for one event from the FIRST FRC API and format it
    # as a DataTables-style JSON fragment (columns + row arrays).
    # NOTE(review): as visible here the built 'result' list is never returned
    # or stored -- presumably the tail of this function was lost, or it has
    # been superseded by the TBA-based variants in this module; confirm
    # before relying on it.
    result = []
    store_data_to_file = False

    # derive our competition name from the FIRST event code
    competition = WebCommonUtils.map_event_code_to_comp(year+event_code)

    result.append('{ "event" : "%s",\n' % (event_code.lower()))
    try:
        opener = urllib2.build_opener()
        # HACK: hardcoded Basic-auth credential for the FIRST API; this
        # belongs in configuration, not in source control.
        opener.addheaders = [('Authorization', 'Basic a3N0aGlsYWlyZTozMDMyRTUxRS1CNkFBLTQ2QzgtOEY5Qy05QzdGM0EzM0Q4RjI='),('Accept','application/json')]

        #first_url_str = 'http://frc-events.usfirst.org/%s/%s/%s' % (year,event_code.upper(),query_str)
        first_url_str = 'https://frc-api.usfirst.org/api/v1.0/rankings/%s/%s' % (year,event_code.upper())
        print 'GET - %s' % first_url_str
        rank_data = opener.open(first_url_str)

        my_parser = RankParser()
        rankings, _ = my_parser.parse( rank_data, table_to_parse )

        # rankings is now a list of lists, with the first element of the list being the list of column headings
        # take the list of columngs and apply to each of the subsequent rows to build the json response
        result.append(' "last_updated": "%s",\n' % time.strftime('%c'))

        headings = rankings[0]
        result.append(' "columns" : [\n')
        for heading in headings:
            result.append(' { "sTitle": "%s" }' % heading)
            result.append(',\n')
        # strip the trailing ',\n' left by the loop above
        if len(headings)>0:
            result = result[:-1]
        result.append(' ],\n')

        result.append(' "%s" : [\n' % (query_str))
        for line in rankings[1:]:
            result.append(' [ ')
            for i in range(0,len(headings)):
                # team-number columns are rendered as hyperlinks to the
                # team data page for this competition
                if need_team_hyperlink(headings[i]):
                    #result.append('"%s"' % (line[i]))
                    result.append(('"<a href=\\"/teamdata/%s/'% competition)+line[i]+'\\">'+line[i]+'</a>"')
                else:
                    #result.append('"%s": "%s"' % (headings[i].title(),line[i]))
                    result.append('"%s"' % (line[i]))
                result.append(', ')
            if len(line) > 0:
                result = result[:-1]
            result.append(' ],\n')
        # drop the trailing ',\n' after the last data row (rankings[0] is
        # the heading row, so '> 1' means at least one data row exists)
        if len(rankings) > 1:
            result = result[:-1]
        result.append(' ]\n')
        store_data_to_file = True
    except Exception, err:
        print 'Caught exception:', err
def process_delete_file_form(global_config, form):
    """Remove the scouting data contributed by one uploaded file.

    Optionally also removes the file from the processed-files list when the
    form requests it. Returns a human-readable result string.
    """
    global_config['logger'].debug( 'Process Attribute Delete Form' )

    data_filename = form[attr_delete_file_label].value
    remove_from_processed_files = (form[attr_remove_file_processed_label].value == 'Yes')

    # Initialize the database session connection
    db_name = global_config['db_name'] + global_config['this_season']
    session = DbSession.open_db_session(db_name)

    attrdef_filename = WebCommonUtils.get_attrdef_filename(short_comp=global_config['this_competition'])
    if attrdef_filename is not None:
        attr_definitions = AttributeDefinitions.AttrDefinitions(global_config)
        attr_definitions.parse(attrdef_filename)

    try:
        ProcessFiles.remove_file_data(global_config, session, attr_definitions,
                                      data_filename, remove_from_processed_files)
        result = 'Scouting Data File %s Attributes Successfully Removed' % (data_filename)
        session.commit()
    except ValueError as reason:
        result = 'Error Removing Scouting Data File %s: %s' % (data_filename, reason)

    session.remove()
    return result
def init_issue_tracker():
    # Construct the web.py form objects used by the issue-tracker pages and
    # publish them through module-level globals so page handlers can render
    # and validate them.
    global issueform
    global new_issueform
    global commentform

    issue_platforms = WebCommonUtils.get_issue_types()

    # Form for editing an existing issue: includes the issue id and a
    # free-text comment box in addition to all issue fields.
    issueform = pureform(
        form.Textbox(issue_id_label, size=20),
        form.Dropdown(issue_platform_label, issue_platforms),
        form.Textbox(issue_summary_label, size=60),
        form.Dropdown(issue_status_label, issue_statuses),
        form.Dropdown(issue_priority_label, issue_priorities),
        form.Dropdown(issue_subgroup_label, issue_subgroups),
        form.Dropdown(issue_component_label, issue_components),
        form.Dropdown(issue_owner_label, issue_username_list),
        form.Dropdown(issue_submitter_label, issue_username_list),
        form.Textarea(issue_description_label, size=1024),
        form.Textarea(issue_comment_label, size=1024))

    # Form for creating a new issue: same fields minus the id (not yet
    # assigned) and the comment box.
    new_issueform = pureform(
        form.Dropdown(issue_platform_label, issue_platforms),
        form.Textbox(issue_summary_label, size=60),
        form.Dropdown(issue_status_label, issue_statuses),
        form.Dropdown(issue_priority_label, issue_priorities),
        form.Dropdown(issue_subgroup_label, issue_subgroups),
        form.Dropdown(issue_component_label, issue_components),
        form.Dropdown(issue_owner_label, issue_username_list),
        form.Dropdown(issue_submitter_label, issue_username_list),
        form.Textarea(issue_description_label, size=1024))

    # Form for attaching a comment to an existing issue.
    commentform = pureform(
        form.Textarea(issue_comment_label, size=1024))
def get_team_score_json(global_config, name, comp, store_json_file=False):
    """Return the team's score for a competition as a JSON string.

    A single score is emitted as a scalar; multiple scores are emitted as a
    JSON array. Optionally persists the payload via FileSync.
    """
    global_config['logger'].debug( 'GET Team %s Score For Competition %s', name, comp )

    season = WebCommonUtils.map_comp_to_season(comp)
    session = DbSession.open_db_session(global_config['db_name'] + season)

    pieces = ['{ "competition" : "%s", "team" : "%s", ' % (comp,name)]
    team_scores = DataModel.getTeamScore(session, name, comp)
    if len(team_scores)==1:
        # Common case: exactly one score record for the competition.
        pieces.append('"score": "%s" }' % team_scores[0].score)
    else:
        pieces.append(' "score": [')
        pieces.append(',\n'.join(score.json() for score in team_scores))
        pieces.append(']}')
    json_str = ''.join(pieces)

    if store_json_file is True:
        try:
            FileSync.put( global_config, '%s/EventData/TeamData/team%s_scouting_score.json' % (comp,name), 'text', json_str)
        except:
            raise

    session.remove()
    return json_str
def process_delete_attr_form(global_config, form):
    """Delete a single attribute value for a team, as submitted via the form.

    Returns a human-readable result string describing success or failure.
    """
    global_config['logger'].debug( 'Process Attribute Delete Form' )

    season = form[attr_delete_season_label].value
    comp = form[attr_delete_comp_label].value
    team = form[attr_delete_team_number_label].value
    attr_name = form[attr_delete_attribute_name_label].value
    old_value = form[attr_delete_old_value_label].value

    # Initialize the database session connection
    db_name = global_config['db_name'] + global_config['this_season']
    session = DbSession.open_db_session(db_name)

    # BUG FIX: initialize attr_def so the delete call below cannot raise a
    # NameError when no attribute definition file exists for this comp.
    attr_def = None
    attrdef_filename = WebCommonUtils.get_attrdef_filename(short_comp=comp)
    if attrdef_filename is not None:
        attr_definitions = AttributeDefinitions.AttrDefinitions(global_config)
        attr_definitions.parse(attrdef_filename)
        attr_def = attr_definitions.get_definition(attr_name)

    try:
        DataModel.deleteAttributeValue(session, team, comp+season, attr_name, old_value, attr_def)
        result = 'Scouting Data Attribute Value %s Successfully Removed From %s' % (old_value,attr_name)
        session.commit()
    except ValueError as reason:
        result = 'Error Removing Scouting Data Attribute Value %s From %s: %s' % (old_value,attr_name,reason)

    session.remove()
    return result
def get_data_from_first(global_config, year, event_code, query_str, table_to_parse=2):
    """Scrape a results table from the legacy usfirst.org event pages and
    return it as a DataTables-style JSON string (columns + row arrays).

    On any failure the payload is closed out with whatever was built so the
    caller still receives syntactically balanced output.
    """
    web.header('Content-Type', 'application/json')

    result = []
    result.append('{ "event" : "%s",\n' % (event_code.lower()))

    try:
        first_url_str = 'http://www2.usfirst.org/%scomp/events/%s/%s.html' % (year,event_code.upper(),query_str)
        rank_data = urllib2.urlopen(first_url_str).read()

        my_parser = RankParser()
        rankings, _ = my_parser.parse( rank_data, table_to_parse )

        # derive our competition name from the FIRST event code
        competition = WebCommonUtils.map_event_code_to_comp(year+event_code)

        # rankings is now a list of lists, with the first element of the list being the list of column headings
        # take the list of columngs and apply to each of the subsequent rows to build the json response
        headings = rankings[0]
        result.append(' "columns" : [\n')
        for heading in headings:
            result.append(' { "sTitle": "%s" }' % heading)
            result.append(',\n')
        if len(headings)>0:
            result = result[:-1]
        result.append(' ],\n')

        result.append(' "%s" : [\n' % (query_str))
        for line in rankings[1:]:
            result.append(' [ ')
            for i in range(0,len(headings)):
                # team-number columns become hyperlinks to the team page
                if need_team_hyperlink(headings[i]):
                    result.append(('"<a href=\\"/teamdata/%s/'% competition)+line[i]+'\\">'+line[i]+'</a>"')
                else:
                    result.append('"%s"' % (line[i]))
                result.append(', ')
            if len(line) > 0:
                result = result[:-1]
            result.append(' ],\n')
        if len(rankings) > 1:
            result = result[:-1]
        result.append(' ]\n')
    except Exception as err:
        # BUG FIX: the previous bare 'except: pass' silently swallowed every
        # error (including KeyboardInterrupt/SystemExit). This remains a
        # deliberate best-effort path, but now catches only Exception and
        # records the failure for diagnosis.
        global_config['logger'].debug( 'Error scraping FIRST event data: %s', err )

    result.append(' ] }\n')
    return ''.join(result)
def get_team_list_json_from_tba(global_config, comp):
    """Return the competition's team list as JSON, fetched through TbaIntf.

    Falls back to an empty list payload when The Blue Alliance is
    unreachable.
    """
    global_config['logger'].debug( 'GET Team List For Competition From TBA %s', comp )

    event_code = WebCommonUtils.map_comp_to_event_code(comp)
    season = WebCommonUtils.map_comp_to_season(comp)
    url_str = '/api/v2/event/%s%s/teams' % (season,event_code.lower())

    try:
        # retrieve the string itself as a formatted json string
        event_data = TbaIntf.get_from_tba(url_str)
    except:
        event_data = '[ ]'

    return '{ "teams" : ' + event_data + ' }\n'
def get_team_list_json_from_tba(global_config, comp):
    """Return the competition's team list as JSON, fetched directly from the
    TBA v2 HTTP API.

    Falls back to an empty list payload when the request fails.
    """
    global_config['logger'].debug( 'GET Team List For Competition From TBA %s', comp )
    web.header('Content-Type', 'application/json')

    event_code = WebCommonUtils.map_comp_to_event_code(comp)
    season = WebCommonUtils.map_comp_to_season(comp)
    url_str = 'http://www.thebluealliance.com/api/v2/event/%s%s/teams?X-TBA-App-Id=frc1073:scouting-system:v01' % (season,event_code)

    try:
        event_data = urllib2.urlopen(url_str).read()
    except:
        # best-effort: serve an empty team list when TBA is unreachable
        event_data = '[ ]'

    return '{ "teams" : %s }\n' % event_data
def get_event_rank_list_json(global_config, year, event_code):
    """Return the event ranking list as JSON, sourced from TBA.

    When TBA returns no rankings, falls back to the previously stored
    ranklist.json file; when that is also empty, emits an empty payload.
    """
    global_config['logger'].debug( 'GET Event Rank List Json' )

    # derive our competition name from the FIRST event code
    competition = WebCommonUtils.map_event_code_to_comp(year+event_code)

    store_data_to_file = False
    result = []
    rankings_data = get_event_data_from_tba( '%s%s/rankings' % (year,event_code.lower()) )

    result.append('{ "event" : "%s",\n' % (event_code.lower()))

    rankings = rankings_data.get('rankings', [])
    if len(rankings):
        result.append(' "last_updated": "%s",\n' % time.strftime('%c'))
        result.append(' "rankings" : [\n')
        for team_rank in rankings:
            result.append(' { "rank": %d, "team_number": %s, "status": "available" }' % (team_rank['rank'],team_rank['team_key'].replace('frc','')))
            result.append(',\n')
        # BUG FIX: strip the trailing ',\n' whenever at least one entry was
        # appended. The previous 'len(rankings) > 1' test left a dangling
        # comma -- invalid JSON -- when exactly one team was ranked.
        if len(rankings) > 0:
            result = result[:-1]
        result.append(' ]\n')
        store_data_to_file = True
    else:
        # we were not able to retrieve the data from FIRST, so let's return any stored file with the
        # information, otherwise we will return an empty json payload
        stored_file_data = FileSync.get( global_config, '%s/EventData/ranklist.json' % (competition) )
        if stored_file_data != '':
            return stored_file_data
        else:
            # no stored data either, so let's just return a formatted, but empty payload
            result.append(' "last_updated": "%s",\n' % time.strftime('%c'))
            result.append(' "rankings" : []\n')

    result.append(' }\n')
    json_str = ''.join(result)

    if store_data_to_file:
        try:
            FileSync.put( global_config, '%s/EventData/ranklist.json' % (competition), 'text', json_str)
        except:
            raise
    return json_str
def get_team_list_json(global_config, season, event, store_json_file=False):
    """Return the team list for an event as JSON.

    The list is sourced from The Blue Alliance; the old database-driven
    implementation has been retired. Optionally persists the payload.
    """
    global team_info_dict

    global_config['logger'].debug( 'GET Team List For Competition %s', event )

    comp = WebCommonUtils.map_event_code_to_comp(event, season)
    session = DbSession.open_db_session(global_config['db_name'] + season)

    json_str = get_team_list_json_from_tba(global_config, comp)

    if store_json_file is True:
        try:
            FileSync.put( global_config, '%s/EventData/%s.json' % (comp,'teams'), 'text', json_str)
        except:
            raise

    session.remove()
    return json_str
def get_team_event_list_from_tba(global_config, team, season):
    """Return the list of competition names a team attends in a season.

    Queries TBA for the team's events; returns whatever was collected (an
    empty list on failure).
    """
    global_config['logger'].debug( 'GET Team Event List TBA' )

    comps = []
    url_str = '/api/v2/team/frc%s/%s/events' % (team,season)
    try:
        for event in TbaIntf.get_from_tba_parsed(url_str):
            comps.append(WebCommonUtils.map_event_code_to_comp(event['event_code'], season))
    except:
        # best-effort: return whatever was gathered before the failure
        pass
    return comps
def process_attr_def_form(global_config, form):
    """Apply attribute weight overrides submitted from the definitions form,
    persist them, and trigger a full scoring recalculation.
    """
    global_config['logger'].debug( 'Process Attribute Definitions Form' )

    attrdef_filename = WebCommonUtils.get_attrdef_filename(global_config['this_competition'])
    if attrdef_filename is not None:
        attr_definitions = AttributeDefinitions.AttrDefinitions(global_config)
        attr_definitions.parse(attrdef_filename)

        # Copy each submitted weight into the parsed definitions, then
        # persist the overrides (idiom fix: removed stray semicolon).
        attr_dict = attr_definitions.get_definitions()
        for key, attr_def in sorted(attr_dict.items()):
            attr_def['Weight'] = form[key].value
        attr_definitions.write_attr_overrides()

        competition = global_config['this_competition'] + global_config['this_season']
        if competition is None:
            raise Exception( 'Competition Not Specified!')

        DataModel.recalculate_scoring(global_config, competition, attr_definitions)
def get_team_attributes_page(global_config):
    """Render every team's weighted attributes as a JSON document for the
    attributes page, teams listed in rank order.
    """
    global_config['logger'].debug( 'GET Team Attributes' )

    session = DbSession.open_db_session(global_config['db_name'] + global_config['this_season'])
    comp = global_config['this_competition'] + global_config['this_season']

    attrdef_filename = WebCommonUtils.get_attrdef_filename(comp=comp)
    attr_definitions = AttributeDefinitions.AttrDefinitions(global_config)
    attr_definitions.parse(attrdef_filename)

    web.header('Content-Type', 'application/json')

    fragments = ['{ "attributes": [\n']

    team_rankings = DataModel.getTeamsInRankOrder(session, comp)
    for team_entry in team_rankings:
        fragments.append("{ 'Team': " + str(team_entry.team))
        fragments.append(", 'Score': " + '%.2f' % team_entry.score )

        team_attributes = DataModel.getTeamAttributesInOrder(session, team_entry.team, comp)
        for attribute in team_attributes:
            attr_def = attr_definitions.get_definition( attribute.attr_name )
            if not attr_def:
                continue
            if int(float(attr_def['Weight'])) == 0:
                # zero-weight attributes are omitted from the page
                continue

            # select the raw value according to the attribute's statistic type
            if attr_def['Statistic_Type'] == 'Total':
                raw_value = attribute.cumulative_value
            elif attr_def['Statistic_Type'] == 'Average':
                raw_value = attribute.avg_value
            else:
                raw_value = attribute.attr_value

            fragments.append( ", '" + attribute.attr_name + "': ")
            fragments.append( DataModel.mapValueToString(raw_value, attribute.all_values, attr_def, True) )

        fragments.append(' }')
        fragments.append(',\n')

    # drop the trailing ',\n' after the final team entry
    if len(team_rankings) > 0:
        fragments = fragments[:-1]
    fragments.append('\n')
    fragments.append(']}')

    session.remove()
    return ''.join(fragments)
def create_picklist_json(global_config, comp=None, store_json_file=False):
    """Build the picklist JSON (teams in rank order with integer scores).

    Side effect: refreshes the module-level local_picklist cache.
    Teams with a zero score are omitted from the payload.
    """
    global_config['logger'].debug( 'Create Picklist Json' )
    global local_picklist

    if comp is None:
        comp = global_config['this_competition'] + global_config['this_season']
        season = global_config['this_season']
    else:
        season = WebCommonUtils.map_comp_to_season(comp)

    session = DbSession.open_db_session(global_config['db_name'] + season)

    result = []
    result.append('{ "picklist": [\n')

    local_picklist = DataModel.getTeamsInRankOrder(session, comp, True)
    rank = 1
    for team in local_picklist:
        # round the score to an integer value
        team.score = float(int(team.score))
        if team.score > 0:
            row = '{ "rank" : %d, "team" : %d, "score" : %d, "competition" : "%s" }' % (rank, team.team, int(team.score), team.competition)
            result.append(row)
            result.append(',\n')
        rank += 1

    # BUG FIX: strip only a trailing ',\n'. The previous 'len(result) > 0'
    # test was always true (the header is element 0), so an empty picklist
    # lost its opening '{ "picklist": [' and produced invalid JSON.
    if result[-1] == ',\n':
        result = result[:-1]
    result.append(']}')

    json_str = ''.join(result)

    if store_json_file is True:
        try:
            FileSync.put( global_config, '%s/EventData/%s.json' % (comp,'picklist'), 'text', json_str)
        except:
            raise

    session.remove()
    return json_str
def get_attr_def_form(global_config):
    """Build the attribute-definitions form pre-populated with the current
    weight of each attribute (0 when the weight is missing or malformed).
    """
    global_config['logger'].debug( 'GET Attribute Definitions Form' )

    form = attrdef_form()

    attrdef_filename = WebCommonUtils.get_attrdef_filename(global_config['this_competition'])
    if attrdef_filename is not None:
        attr_definitions = AttributeDefinitions.AttrDefinitions(global_config)
        attr_definitions.parse(attrdef_filename)

        for key, attr_def in sorted(attr_definitions.get_definitions().items()):
            try:
                weight = float(attr_def['Weight'])
            except:
                # missing or non-numeric weight defaults to zero
                weight = 0.0
            form[key].value = str(int(weight))

    return form
def do_account_disabled(global_config):
    """Render the 'account disabled' page for the authenticated user and
    mark the user as logged out; redirects to /login when no credentials
    are present.
    """
    auth = web.ctx.env.get('HTTP_AUTHORIZATION')
    if auth is None:
        raise web.seeother('/login')
    else:
        auth = re.sub('^Basic ','',auth)
        # BUG FIX: split on the first ':' only so passwords that contain a
        # colon no longer raise a ValueError during tuple unpacking.
        username,password = base64.decodestring(auth).split(':', 1)
        logged_out_users[username] = True

    web.header('Cache-Control','no-cache')
    web.header('Pragma','no-cache')

    result = '<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">'
    result += '<html>'
    result += WebCommonUtils.get_html_head()
    result += '<body>'
    result += username + ' account is disabled, click <a href="/login">Here</a> once account is re-enabled'
    result += '</body>'
    # BUG FIX: removed a stray '</head>' tag that was emitted after
    # '</body>', producing malformed HTML.
    result += '</html>'
    return result
def get_team_attr_rankings_page(global_config, comp, attr_name):
    """Render a JSON ranking of all teams by a single attribute.

    The attribute's statistic type (Average vs. cumulative Total) selects
    both the ranking query and how each team's value is computed.
    """
    global_config['logger'].debug( 'GET Team Attribute Rankings' )

    session = DbSession.open_db_session(global_config['db_name'] + global_config['this_season'])

    attrdef_filename = WebCommonUtils.get_attrdef_filename(comp=comp)
    attr_definitions = AttributeDefinitions.AttrDefinitions(global_config)
    attr_definitions.parse(attrdef_filename)

    attr = attr_definitions.get_definition(attr_name)
    try:
        stat_type = attr['Statistic_Type']
    except:
        # no definition (or no statistic type) -- treat as a running total
        stat_type = 'Total'

    web.header('Content-Type', 'application/json')

    if stat_type == 'Average':
        team_rankings = DataModel.getTeamAttributesInAverageRankOrder(session, comp, attr_name, False)
    else:
        team_rankings = DataModel.getTeamAttributesInRankOrder(session, comp, attr_name, False)

    rows = []
    for team in team_rankings:
        if stat_type == 'Average':
            value = int(team.cumulative_value/team.num_occurs)
        else:
            value = int(team.cumulative_value)
        rows.append('{ "team": %d, "value": %d }' % (team.team,value))

    pieces = ['{ "rankings": [\n', ',\n'.join(rows), '\n', ']}']

    session.remove()
    return ''.join(pieces)
def get_team_attr_rankings_json(global_config, comp=None, attr_name=None):
    """Return per-team rankings for one attribute as a DataTables-style JSON
    string; Map_Integer attributes get one column per mapped value.
    """
    global_config['logger'].debug( 'GET Team Attribute Rankings Json' )

    # NOTE(review): this flag is never set True below, so the FileSync.put
    # branch never executes; left unchanged to preserve existing behavior,
    # but confirm whether the file store was meant to be enabled.
    store_data_to_file = False

    if comp is None:
        comp = global_config['this_competition'] + global_config['this_season']
        season = global_config['this_season']
    else:
        season = WebCommonUtils.map_comp_to_season(comp)

    session = DbSession.open_db_session(global_config['db_name'] + season)

    attrdef_filename = WebCommonUtils.get_attrdef_filename(comp=comp)
    # BUG FIX: pass global_config to the AttrDefinitions constructor, as
    # every other call site in this module does; it was omitted here.
    attr_definitions = AttributeDefinitions.AttrDefinitions(global_config)
    attr_definitions.parse(attrdef_filename)

    attr_def = attr_definitions.get_definition(attr_name)
    try:
        stat_type = attr_def['Statistic_Type']
    except:
        stat_type = 'Total'

    web.header('Content-Type', 'application/json')

    result = []
    result.append('{ "attr_name" : "%s",\n' % attr_name)

    # add the columns bases on the attribute definition type
    result.append(' "columns" : [\n')
    result.append(' { "sTitle": "Team" }')
    result.append(',\n')
    columns = []
    if attr_def['Type'] == 'Map_Integer':
        # one column per mapped value, e.g. 'Name=1:Other=2' -> Name, Other
        map_values = attr_def['Map_Values'].split(':')
        for map_value in map_values:
            item_name = map_value.split('=')[0]
            columns.append(item_name)
            result.append(' { "sTitle": "%s" }' % item_name)
            result.append(',\n')
    # strip the trailing ',\n' left after the last column entry
    result = result[:-1]
    result.append('\n')
    result.append(' ],\n')

    if stat_type == 'Average':
        team_rankings = DataModel.getTeamAttributesInAverageRankOrder(session, comp, attr_name)
    else:
        team_rankings = DataModel.getTeamAttributesInRankOrder(session, comp, attr_name)

    result.append(' "rankings" : [\n')
    for team_attr in team_rankings:
        data_str = ' [ %d,' % team_attr.team
        value_dict = DataModel.mapAllValuesToDict(attr_def, team_attr.all_values)
        for column in columns:
            try:
                value = value_dict[column]
            except:
                value = 0
            data_str += ' %d,' % value
        data_str = data_str.rstrip(',')
        data_str += ' ]'
        result.append(data_str)
        result.append(',\n')
    if len(team_rankings) > 0:
        result = result[:-1]
    result.append('\n')
    result.append(' ]\n}')

    json_str = ''.join(result)

    if store_data_to_file is True:
        try:
            file_name = 'attrrankings_%s' % attr_name
            FileSync.put( global_config, '%s/EventData/%s.json' % (comp,file_name), 'text', json_str)
        except:
            raise

    session.remove()
    return json_str
def get_event_stats_json(global_config, year, event_code, stat_type):
    """Return event statistics (e.g. OPRs) from TBA as a DataTables-style
    JSON string, falling back to a previously stored file when TBA has no
    data for the event.
    """
    global_config['logger'].debug( 'GET Event Results Json' )

    # derive our competition name from the FIRST event code
    competition = WebCommonUtils.map_event_code_to_comp(year+event_code)

    store_data_to_file = False
    result = []
    result.append('{ "event" : "%s",\n' % (event_code.lower()))

    event_stats = get_event_data_from_tba( '%s%s/oprs' % (year,event_code.lower()) )

    if len(event_stats):
        result.append(' "last_updated": "%s",\n' % time.strftime('%c'))

        headings = [ 'Team', stat_type.rstrip('s').upper() ]
        result.append(' "columns" : [\n')
        for heading in headings:
            result.append(' { "sTitle": "%s" }' % heading)
            result.append(',\n')
        if len(headings)>0:
            result = result[:-1]
        result.append(' ],\n')

        result.append(' "stats" : [\n')
        try:
            stats_dict = event_stats[stat_type]
            for key, value in stats_dict.iteritems():
                result.append( ' ["%s", %.2f' % (get_team_hyperlink( competition, key.lstrip('frc') ),value) )
                result.append(' ],\n')
                store_data_to_file = True
            if store_data_to_file is True:
                # BUG FIX: the old code replaced the last row's ' ],\n'
                # terminator with a single ' ]\n', which closed the row but
                # left the "stats" array itself unterminated. Close the row
                # (without the comma) and then close the array.
                result = result[:-1]
                result.append(' ]\n')
                result.append(' ]\n')
        except:
            global_config['logger'].debug( 'No Statistics Data For %s' % stat_type )
            result.append(' ]\n')
    else:
        # we were not able to retrieve the data from FIRST, so let's return any stored file with the
        # information, otherwise we will return an empty json payload
        stored_file_data = FileSync.get( global_config, '%s/EventData/eventstats_%s.json' % (competition,stat_type) )
        if stored_file_data != '':
            return stored_file_data

    result.append(' }\n')
    json_str = ''.join(result)

    if store_data_to_file:
        try:
            FileSync.put( global_config, '%s/EventData/eventstats_%s.json' % (competition,stat_type), 'text', json_str)
        except:
            raise
    return json_str
def get_event_matchschedule_json(global_config, year, event_code, team_str = None):
    """Return the event match schedule as a JSON string, grouped by round.

    When team_str is given, only that team's matches are requested and the
    cached filename carries a per-team suffix. Falls back to previously
    stored data when TBA returns nothing.
    """
    global_config['logger'].debug( 'GET Event Schedule Json' )

    # derive our competition name from the FIRST event code
    competition = WebCommonUtils.map_event_code_to_comp(year+event_code)

    store_data_to_file = False

    if team_str is None:
        exp_filename = ''
        event_matches = get_event_data_from_tba( '%s%s/matches' % (year,event_code.lower()) )
    else:
        exp_filename = '_%s' % team_str
        event_matches = WebTeamData.get_team_data_from_tba( team_str, 'event/%s%s/matches' % (year,event_code.lower()) )

    match_schedule = dict()
    match_schedule['event'] = event_code.lower()
    match_schedule['columns'] = [ 'Round', 'Match', 'Red_1', 'Red_2', 'Red_3', 'Blue_1', 'Blue_2', 'Blue_3' ]
    match_schedule['qualification'] = []
    match_schedule['quarter_finals'] = []
    match_schedule['semi_finals'] = []
    match_schedule['finals'] = []

    # comp_level code -> schedule bucket
    round_keys = { 'qm': 'qualification', 'qf': 'quarter_finals',
                   'sf': 'semi_finals', 'f': 'finals' }

    if len(event_matches):
        # TBA returns the entire match set; bucket each match by round
        for match in event_matches:
            comp_level = match['comp_level']

            # elimination rounds are labelled 'set-match', quals by number
            if comp_level in ('qf', 'sf'):
                match_str = '%s-%s' % (match['set_number'],match['match_number'])
            else:
                match_str = str(match['match_number'])

            teams = [ match['alliances'][color]['team_keys'][slot].lstrip('frc')
                      for color in ('red', 'blue') for slot in (0, 1, 2) ]

            bucket = round_keys.get(comp_level)
            if bucket is not None:
                match_schedule[bucket].append([ comp_level, match_str ] + teams)

            store_data_to_file = True

        # the qualification match schedule needs to be sorted, the sort is
        # done by the second element of each row -- the match number
        match_schedule['qualification'].sort(key=lambda row: int(row[1]))
    else:
        # we were not able to retrieve the data from FIRST, so let's return any stored file with the
        # information, otherwise we will return an empty json payload
        stored_file_data = FileSync.get( global_config, '%s/EventData/matchschedule%s.json' % (competition,exp_filename) )
        if stored_file_data != '':
            return stored_file_data

    json_str = json.dumps(match_schedule)

    if store_data_to_file:
        try:
            FileSync.put( global_config, '%s/EventData/matchschedule%s.json' % (competition,exp_filename), 'text', json_str)
        except:
            raise
    return json_str
def get_event_matchresults_json(global_config, year, event_code, round_str, team_str = None):
    """Build the match results JSON payload for one round of an event.

    global_config -- application-wide settings/shared-object dictionary
    year          -- four digit season string, e.g. '2018'
    event_code    -- FIRST/TBA event code, e.g. 'ctha'
    round_str     -- 'qual', 'quarters', 'semis', or 'finals'
    team_str      -- optional team key; when given, results are fetched
                     per-team instead of per-event

    Returns a JSON string; falls back to a previously stored results file
    when no live data is available from The Blue Alliance (TBA).
    """
    global_config['logger'].debug( 'GET Event Results Json' )

    # map the friendly round name onto TBA's comp_level code.
    # NOTE(review): an unrecognized round_str leaves match_selector unbound and
    # raises NameError in the match loop below -- confirm callers validate it.
    if round_str == 'qual':
        match_selector = 'qm'
    elif round_str == 'quarters':
        match_selector = 'qf'
    elif round_str == 'semis':
        match_selector = 'sf'
    elif round_str == 'finals':
        match_selector = 'f'

    # derive our competition name from the FIRST event code
    competition = WebCommonUtils.map_event_code_to_comp(year+event_code)

    store_data_to_file = False
    # the JSON document is assembled as a list of string fragments and joined once
    result = []
    result.append('{ "event" : "%s",\n' % (event_code.lower()))

    if team_str is None:
        exp_filename = ''
        event_matches = get_event_data_from_tba( '%s%s/matches' % (year,event_code.lower()), api_version='v3' )
    else:
        # per-team results live in their own file, suffixed with the team key
        exp_filename = '_%s' % team_str
        event_matches = WebTeamData.get_team_data_from_tba( team_str, 'event/%s%s/matches' % (year,event_code.lower()) )

    if len(event_matches):
        result.append('   "last_updated": "%s",\n' % time.strftime('%c'))

        # emit the DataTables-style column definitions
        headings = [ 'Match', 'Start Time', 'Red 1', 'Red 2', 'Red 3', 'Blue 1', 'Blue 2', 'Blue 3', 'Red Score', 'Blue Score' ]
        result.append('   "columns" : [\n')
        for heading in headings:
            result.append('      { "sTitle": "%s" }' % heading)
            result.append(',\n')
        if len(headings)>0:
            # drop the trailing ',\n' after the last column entry
            result = result[:-1]
        result.append('   ],\n')

        result.append('   "matchresults" : [\n')
        # the entire match set is returned from TBA; filter out the matches for
        # the desired round only
        for match in event_matches:
            if str(match['comp_level']) == match_selector:
                result.append('      [ ')

                # Match number.  The try/raise/except dance below deliberately
                # funnels execution to the hyperlink form unless the
                # 'json_no_links' config flag is set to 'Yes' (a missing key or
                # the bare `raise` both land in the except clause).
                try:
                    if global_config['json_no_links'] == 'Yes':
                        result.append( '"%s", ' % str(match['match_number']) )
                    else:
                        raise
                except:
                    result.append( '"%s", ' % get_match_hyperlink(competition, match) )

                # Match start time -- TBA supplies an epoch timestamp,
                # formatted here in the server's local time zone
                match_epoch_time = int(match['time'])
                time_format_str = '%a %b %d - %I:%M %p'
                match_time_str = datetime.datetime.fromtimestamp(match_epoch_time).strftime(time_format_str)
                result.append( '"%s", ' % match_time_str )

                # same links-vs-plain-text trick as above for the team columns
                try:
                    if global_config['json_no_links'] == 'Yes':
                        # Red alliance teams
                        result.append( '"%s", ' % str(match['alliances']['red']['team_keys'][0]).lstrip('frc') )
                        result.append( '"%s", ' % str(match['alliances']['red']['team_keys'][1]).lstrip('frc') )
                        result.append( '"%s", ' % str(match['alliances']['red']['team_keys'][2]).lstrip('frc') )

                        # Blue alliance teams
                        result.append( '"%s", ' % str(match['alliances']['blue']['team_keys'][0]).lstrip('frc') )
                        result.append( '"%s", ' % str(match['alliances']['blue']['team_keys'][1]).lstrip('frc') )
                        result.append( '"%s", ' % str(match['alliances']['blue']['team_keys'][2]).lstrip('frc') )
                    else:
                        raise
                except:
                    # Red alliance teams
                    result.append( '"%s", ' % get_team_hyperlink( competition, str(match['alliances']['red']['team_keys'][0]).lstrip('frc') ) )
                    result.append( '"%s", ' % get_team_hyperlink( competition, str(match['alliances']['red']['team_keys'][1]).lstrip('frc') ) )
                    result.append( '"%s", ' % get_team_hyperlink( competition, str(match['alliances']['red']['team_keys'][2]).lstrip('frc') ) )

                    # Blue alliance teams
                    result.append( '"%s", ' % get_team_hyperlink( competition, str(match['alliances']['blue']['team_keys'][0]).lstrip('frc') ) )
                    result.append( '"%s", ' % get_team_hyperlink( competition, str(match['alliances']['blue']['team_keys'][1]).lstrip('frc') ) )
                    result.append( '"%s", ' % get_team_hyperlink( competition, str(match['alliances']['blue']['team_keys'][2]).lstrip('frc') ) )

                # Red alliance score -- TBA reports -1 for an unplayed match
                score = str(match['alliances']['red']['score'])
                if score == '-1':
                    score = '-'
                result.append( '"%s", ' % score )

                # Blue alliance score
                score = str(match['alliances']['blue']['score'])
                if score == '-1':
                    score = '-'
                result.append( '"%s" ' % score )

                result.append('      ],\n')
                store_data_to_file = True

        if store_data_to_file is True:
            # replace the last row's trailing ',\n' with a plain close bracket
            result = result[:-1]
            result.append('      ]\n')
        # close the "matchresults" array.
        # NOTE(review): if event_matches is non-empty but contains no match of
        # the requested round, the array is closed with no rows but the payload
        # is not stored -- confirm that is the intended behavior.
        result.append('   ]\n')
    else:
        # we were not able to retrieve the data from FIRST, so let's return any
        # stored file with the information, otherwise we will return an empty
        # json payload
        stored_file_data = FileSync.get( global_config, '%s/EventData/matchresults_%s%s.json' % (competition,round_str,exp_filename) )
        if stored_file_data != '':
            return stored_file_data

    result.append(' }\n')
    json_str = ''.join(result)

    if store_data_to_file:
        # re-raise any storage failure to the caller
        try:
            FileSync.put( global_config, '%s/EventData/matchresults_%s%s.json' % (competition,round_str,exp_filename), 'text', json_str)
        except:
            raise
    return json_str
def get_event_standings_json(global_config, year, event_code):
    """Build the event rankings (standings) JSON payload.

    global_config -- application-wide settings/shared-object dictionary
    year          -- four digit season string, e.g. '2018'
    event_code    -- FIRST/TBA event code, e.g. 'ctha'

    Returns a JSON string.  When TBA has no ranking data, falls back to a
    previously stored rankings file, or to an empty-but-well-formed payload.
    """
    global_config['logger'].debug( 'GET Event Rankings Json' )

    # derive our competition name from the FIRST event code
    competition = WebCommonUtils.map_event_code_to_comp(year+event_code)

    store_data_to_file = False
    result = []
    tba_data = get_event_data_from_tba( '%s%s/rankings' % (year,event_code.lower()) )

    result.append('{ "event" : "%s",\n' % (event_code.lower()))

    # a missing 'rankings' key is treated the same as no TBA data at all;
    # previously that case emitted malformed JSON ('{ "event": ..., }')
    rankings = tba_data.get('rankings') if tba_data else None

    if rankings is not None:
        result.append('   "last_updated": "%s",\n' % time.strftime('%c'))

        headings = [ 'Rank', 'Team', 'Record', 'Matches_Played', 'Dq' ]
        result.append('   "columns" : [\n')
        for heading in headings:
            result.append('      { "sTitle": "%s" }' % heading)
            result.append(',\n')
        if len(headings)>0:
            # drop the trailing ',\n' after the last column entry
            result = result[:-1]
        result.append('   ],\n')

        result.append('   "rankings" : [\n')
        for line in rankings:
            result.append('      [ ')
            for item in headings:
                key = item.lower()
                if key == 'record':
                    # render W-L-T as a single string
                    result.append('"%s-%s-%s"' % (str(line[key]['wins']),str(line[key]['losses']),str(line[key]['ties'])))
                elif key == 'team':
                    # strip TBA's 'frc' prefix and link to the team data page
                    team_str = line['team_key'].replace('frc','')
                    result.append(('"<a href=\\"/teamdata/%s/'% competition)+team_str+'\\">'+team_str+'</a>"')
                else:
                    result.append('"%s"' % (str(line[key])))
                result.append(', ')
            if len(line) > 0:
                # drop the trailing ', ' after the last cell
                result = result[:-1]
            result.append(' ],\n')
        # BUG FIX: this guard was previously `len(rankings) > 1`, which left a
        # trailing comma and an unclosed row when exactly one ranking existed
        if len(rankings) > 0:
            result = result[:-1]
            result.append(' ]\n')
        store_data_to_file = True
        result.append('   ]\n')
    else:
        # we were not able to retrieve the data from FIRST, so let's return any
        # stored file with the information, otherwise we will return an empty
        # json payload
        stored_file_data = FileSync.get( global_config, '%s/EventData/rankings.json' % (competition) )
        if stored_file_data != '':
            return stored_file_data
        else:
            # no stored data either, so let's just return a formatted, but empty payload
            result.append('   "last_updated": "%s",\n' % time.strftime('%c'))
            result.append('   "columns" : [],\n')
            result.append('   "rankings" : []\n')

    result.append(' }\n')
    json_str = ''.join(result)

    if store_data_to_file:
        FileSync.put( global_config, '%s/EventData/rankings.json' % (competition), 'text', json_str)
    return json_str
def process_debrief_files(global_config, input_dir, recursive=True): # Initialize the database session connections issues_db_name = global_config['issues_db_name'] + global_config['this_season'] debrief_db_name = global_config['debriefs_db_name'] + global_config['this_season'] debrief_session = DbSession.open_db_session(debrief_db_name) issues_session = DbSession.open_db_session(issues_db_name) # Create the database if it doesn't already exist #if not os.path.exists('./' + db_name): # DebriefDataModel.create_db_tables(my_db) # The following regular expression will select all files that conform to # the file naming format Debrief*.txt. Build a list of all datafiles that match # the naming format within the directory passed in via command line # arguments. file_regex = re.compile('Debrief[a-zA-Z0-9_-]+.txt') files = get_files(global_config, debrief_session, debrief_db_name, input_dir, file_regex, recursive) # Process data files for data_filename in files: print 'processing %s'%data_filename try: # Initialize the debrief_attributes dictionary in preparation for the # parsing of the data file debrief_attributes = {} # Parse the data file, storing all the information in the attributes # dictionary FileParser.FileParser(data_filename).parse(debrief_attributes) DebriefDataModel.addDebriefFromAttributes(debrief_session, debrief_attributes) # Also, extract the competition name, too, if it has been included in # the data file if debrief_attributes.has_key('Competition'): competition = debrief_attributes['Competition'] issue_base_name = WebCommonUtils.split_comp_str(competition)[0] else: competition = global_config['this_competition'] + global_config['this_season'] issue_base_name = global_config['this_competition'] if competition == None: raise Exception( 'Competition Not Specified!') # At competition, we will likely have multiple laptops manging the data, but we want # only one machine to be responsible for the issues database. 
In all likelihood, # that machine will be the one in the pits, or possibly the application running # in the cloud. if global_config['issues_db_master'] == 'Yes': match_id = debrief_attributes['Match'] submitter = debrief_attributes['Scouter'] timestamp = str(int(time.time())) subgroup = 'Unassigned' status = 'Open' owner = 'Unassigned' if debrief_attributes.has_key('Issue1_Summary') or debrief_attributes.has_key('Issue1_Description'): # look to see if there is already a debrief issue, and if so, do not attempt to create/update # an issue, as there are already other issue files that would then conflict with this one issue_key = 'Issue1' if DebriefDataModel.getDebriefIssue(debrief_session, competition, match_id, issue_key) == None: # if no summary is provided, then use the description as the summary. Likewise, if no description # is provided then use the summary as the description. Keep in mind that we need at least the # summary or description to be provided. if debrief_attributes.has_key('Issue1_Summary'): summary = debrief_attributes['Issue1_Summary'] else: summary = debrief_attributes['Issue1_Description'] if debrief_attributes.has_key('Issue1_Description'): description = debrief_attributes['Issue1_Description'] else: description = debrief_attributes['Issue1_Summary'] if debrief_attributes.has_key('Issue1_Priority'): priority = debrief_attributes['Issue1_Priority'] else: priority = 'Priority_3' if debrief_attributes.has_key('Issue1_Taskgroup'): component = debrief_attributes['Issue1_Taskgroup'] else: component = '' debrief_key = str(match_id) + '_' + issue_key issue_id = IssueTrackerDataModel.getIssueId(issues_session, issue_base_name) issue = IssueTrackerDataModel.addOrUpdateIssue(issues_session, issue_id, summary, status, priority, subgroup, component, submitter, owner, description, timestamp, debrief_key) if issue != None: issue.create_file('./static/data/%s/ScoutingData' % competition) DebriefDataModel.addOrUpdateDebriefIssue(debrief_session, match_id, 
competition, issue_id, issue_key) if debrief_attributes.has_key('Issue2_Summary') or debrief_attributes.has_key('Issue2_Description'): # look to see if there is already a debrief issue, and if so, do not attempt to create/update # an issue, as there are already other issue files that would then conflict with this one issue_key = 'Issue2' if DebriefDataModel.getDebriefIssue(debrief_session, competition, match_id, issue_key) == None: # if no summary is provided, then use the description as the summary. Likewise, if no description # is provided then use the summary as the description. Keep in mind that we need at least the # summary or description to be provided. if debrief_attributes.has_key('Issue2_Summary'): summary = debrief_attributes['Issue2_Summary'] else: summary = debrief_attributes['Issue2_Description'] if debrief_attributes.has_key('Issue2_Description'): description = debrief_attributes['Issue2_Description'] else: description = debrief_attributes['Issue2_Summary'] if debrief_attributes.has_key('Issue2_Priority'): priority = debrief_attributes['Issue2_Priority'] else: priority = 'Priority_3' if debrief_attributes.has_key('Issue2_Taskgroup'): component = debrief_attributes['Issue2_Taskgroup'] else: component = '' debrief_key = str(match_id) + '_' + issue_key issue_id = IssueTrackerDataModel.getIssueId(issues_session, issue_base_name) issue = IssueTrackerDataModel.addOrUpdateIssue(issues_session, issue_id, summary, status, priority, subgroup, component, submitter, owner, description, timestamp, debrief_key) if issue != None: issue.create_file('./static/data/%s/ScoutingData' % competition) DebriefDataModel.addOrUpdateDebriefIssue(debrief_session, match_id, competition, issue_id, issue_key) if debrief_attributes.has_key('Issue3_Summary') or debrief_attributes.has_key('Issue3_Description'): # look to see if there is already a debrief issue, and if so, do not attempt to create/update # an issue, as there are already other issue files that would then conflict with 
this one issue_key = 'Issue3' if DebriefDataModel.getDebriefIssue(debrief_session, competition, match_id, issue_key) == None: # if no summary is provided, then use the description as the summary. Likewise, if no description # is provided then use the summary as the description. Keep in mind that we need at least the # summary or description to be provided. if debrief_attributes.has_key('Issue3_Summary'): summary = debrief_attributes['Issue3_Summary'] else: summary = debrief_attributes['Issue3_Description'] if debrief_attributes.has_key('Issue3_Description'): description = debrief_attributes['Issue3_Description'] else: description = debrief_attributes['Issue3_Summary'] if debrief_attributes.has_key('Issue3_Priority'): priority = debrief_attributes['Issue3_Priority'] else: priority = 'Priority_3' if debrief_attributes.has_key('Issue3_Taskgroup'): component = debrief_attributes['Issue3_Taskgroup'] else: component = '' debrief_key = str(match_id) + '_' + issue_key issue_id = IssueTrackerDataModel.getIssueId(issues_session, issue_base_name) issue = IssueTrackerDataModel.addOrUpdateIssue(issues_session, issue_id, summary, status, priority, subgroup, component, submitter, owner, description, timestamp, debrief_key) if issue != None: issue.create_file('./static/data/%s/ScoutingData' % competition) DebriefDataModel.addOrUpdateDebriefIssue(debrief_session, match_id, competition, issue_id, issue_key) except Exception, e: # log the exception but continue processing other files log_exception(global_config['logger'], e) # add the file to the set of processed files so that we don't process it again. Do it outside the # try/except block so that we don't try to process a bogus file over and over again. DebriefDataModel.addProcessedFile(debrief_session, data_filename)
def get_team_datafiles_page(global_config, name, display_notes=True):
    """Render the HTML page listing a team's info, scouting data and media.

    global_config -- application-wide settings/shared-object dictionary
    name          -- team number as a string
    display_notes -- when True, append the team's notes for the current
                     competition at the bottom of the page

    Returns the HTML page string, or None when no attribute definitions
    are configured.
    """
    global_config['logger'].debug( 'GET Team Data Files: %s', name )

    if global_config['attr_definitions'] == None:
        return None

    session = DbSession.open_db_session(global_config['db_name'] + global_config['this_season'])

    page=''
    team_info = DataModel.getTeamInfo(session, int(name))
    if team_info:
        page += '<h3>Team Info</h3>'
        page += '<li>Team Nickname: ' + team_info.nickname + '</li>'
        page += '<li>Affiliation: ' + team_info.fullname + '</li>'
        page += '<li>Location: ' + team_info.location + '</li>'
        page += '<li>Rookie Season: ' + str(team_info.rookie_season) + '</li>'
        page += '<li>Website: <a href="' + team_info.website + '">' + team_info.website + '</a></li>'
        page += '<br>'

    # build the list of competitions to scan: this competition plus any
    # configured in 'other_competitions' (comma separated), each suffixed
    # with the season string
    competitions = []
    this_comp = global_config['this_competition']
    season = global_config['this_season']
    competitions.append(this_comp+season)

    competitions_str = global_config['other_competitions']
    competitions_str = competitions_str.replace(this_comp,'')
    if competitions_str.count(',') > 0:
        other_comps = competitions_str.split(',')
        for other_comp in other_comps:
            if other_comp != '':
                competitions.append(other_comp+season)
    elif competitions_str != '':
        competitions.append(competitions_str+season)

    for comp in competitions:
        if comp != '':
            attrdef_filename = WebCommonUtils.get_attrdef_filename(comp=comp)
            attr_definitions = AttributeDefinitions.AttrDefinitions(global_config)
            attr_definitions.parse(attrdef_filename)

            input_dir = './static/data/' + comp + '/ScoutingData/'
            # BUG FIX: the literal dot before the extension is now escaped
            pattern = 'Team' + name + '_' + r'[a-zA-Z0-9_]*\.txt'
            datafiles = get_datafiles(input_dir, re.compile(pattern), False, global_config['logger'])

            input_dir = './static/data/' + comp + '/ScoutingPictures/'
            # BUG FIX: the old pattern '[a-zA-Z0-9_]*.jpg|mp4' made the '|'
            # bind the whole expression, so it matched the literal substring
            # 'mp4' in ANY filename; the extension alternation is now grouped.
            pattern = 'Team' + name + '_' + r'[a-zA-Z0-9_]*\.(jpg|mp4)'
            mediafiles = get_datafiles(input_dir, re.compile(pattern), False, global_config['logger'])

            if len(datafiles) == 0 and len(mediafiles) == 0:
                continue

            page += '<hr>'
            page += '<h3> ' + comp + '</h3>'

            team_attributes = DataModel.getTeamAttributesInOrder(session, name, comp)
            if len(team_attributes) > 0:
                page += '<ul>'
                page += '<h3>Scouting Data Summary:</h3>'
                page += '<ul>'
                page += '<table border="1" cellspacing="5">'
                page += '<tr>'
                page += '<th>Attribute Name</th>'
                page += '<th>Matches</th>'
                page += '<th>Cumulative Value</th>'
                page += '<th>Average Value</th>'
                page += '<th>All Values</th>'
                page += '</tr>'
                for attribute in team_attributes:
                    attr_def = attr_definitions.get_definition( attribute.attr_name )
                    # only show attributes that the definition file flags for
                    # display, reporting, or non-zero scoring weight
                    include_attr = False
                    if attr_def:
                        if attr_def.has_key('Include_In_Team_Display') \
                           and attr_def['Include_In_Team_Display'] == 'Yes':
                            include_attr = True
                        elif attr_def.has_key('Include_In_Report') \
                           and attr_def['Include_In_Report'] == 'Yes':
                            include_attr = True
                        elif attr_def.has_key('Weight') \
                           and attr_def['Weight'] != '0':
                            include_attr = True
                        if include_attr == True:
                            page += '<tr>'
                            if attr_def.has_key('Display_Name'):
                                page += '<td>%s</td>' % attr_def['Display_Name']
                            else:
                                page += '<td>%s</td>' % attr_def['Name']
                            page += '<td>%s</td>' % str(attribute.num_occurs)
                            page += '<td>%s</td>' % str(attribute.cumulative_value)
                            page += '<td>%0.2f</td>' % (attribute.avg_value)
                            page += '<td>%s</td>' % attribute.all_values
                            page += '</tr>'
                page += '</table>'
                page += '</ul>'
                page += '</ul>'

            if len(datafiles) > 0:
                page += '<ul>'
                page += '<h3>Pit and Match Data:</h3>'
                page += '<ul>'
                for filename in datafiles:
                    segments = filename.split('/')
                    basefile = segments[-1]
                    # link to a url that lets the webserver create a nicer
                    # display of the file contents (rather than the raw file)
                    page += '<li><a href="' + '/ScoutingData/' + comp + '/' + basefile + '">' + basefile + '</a></li>'
                page += '</ul>'

            if len(mediafiles) > 0:
                page += '<h3>Pictures and Videos:</h3>'
                page += '<ul>'
                for filename in mediafiles:
                    segments = filename.split('/')
                    basefile = segments[-1]
                    page += '<li><a href="' + filename.lstrip('.') + '">' + basefile + '</a></li>'
                page += '</ul>'
            page += '</ul>'

    if display_notes == True:
        page += '<hr>'
        page += '<h3> Notes for Team ' + name + '</h3>'
        page += '<ul>'
        comp = global_config['this_competition'] + global_config['this_season']
        team_notes = DataModel.getTeamNotes(session, name, comp)
        for note in team_notes:
            page += '<li>' + note.data + '</li>'
        page += '</ul>'

    session.remove()
    return page
def get_team_rankings_json(global_config, season, event, attr_filters=[], filter_name=None, thumbnails = False, store_json_file=False):
    """Build the team rankings JSON payload for an event.

    global_config   -- application-wide settings/shared-object dictionary
    season          -- season string used to select the database
    event           -- event code, mapped to a competition name
    attr_filters    -- optional list of 'name' or 'name=value' strings; when
                      given, rankings are recomputed from those attributes
                      instead of the stored overall team scores
                      NOTE(review): mutable default argument -- safe only as
                      long as callers never mutate it
    filter_name     -- suffix for the stored json file name when filtering
    thumbnails      -- when True, embed scouting-thumbnail snippets per team
    store_json_file -- when True, also persist the payload via FileSync

    Returns the JSON string.
    """
    global_config['logger'].debug( 'GET Team Rankings Json' )
    store_data_to_file = False
    comp = WebCommonUtils.map_event_code_to_comp(event, season)

    session = DbSession.open_db_session(global_config['db_name'] + season)

    # the JSON document is assembled as a list of string fragments and joined once
    result = []
    result.append('{ "rankings": [\n')

    rank_added = False
    if len(attr_filters) == 0:
        # no filters: use the overall team scores already stored in the database
        team_rankings = DataModel.getTeamsInRankOrder(session, comp, False)
        for team in team_rankings:
            # round the score to an integer value (mutates the ORM object
            # in memory only; no commit is issued here)
            team.score = float(int(team.score))
            if team.score > 0:
                thumbnails_snippet = ''
                if thumbnails:
                    thumbnails_snippet = ',\n' + get_team_scouting_thumbnails_json_snippet(global_config, comp, str(team.team))
                result.append( '   { "score": %0.1f, "competition": "%s", "team": %d%s }' % (team.score, comp, team.team,thumbnails_snippet))
                result.append(',\n')
                rank_added = True
    else:
        # we'll need the attribute definitions in order to retrieve the correct
        # attribute value and attribute weighting
        attrdef_filename = WebCommonUtils.get_attrdef_filename(comp=comp)
        attr_definitions = AttributeDefinitions.AttrDefinitions(global_config)
        attr_definitions.parse(attrdef_filename)

        # accumulate a score per team across all requested filters
        team_rank_dict = dict()
        for attr_filter in attr_filters:
            # a filter is either 'name=value' or just 'name'
            try:
                attr_name, attr_value = attr_filter.split('=')
            except:
                attr_name = attr_filter
                attr_value = None
            attr_def = attr_definitions.get_definition(attr_name)
            if attr_value is None:
                # score by the attribute's (weighted) cumulative or average value
                team_rankings = DataModel.getTeamAttributesInRankOrder(session, comp, attr_name, False)
                for team in team_rankings:
                    try:
                        stat_type = attr_def['Statistic_Type']
                    except:
                        stat_type = 'Total'
                    weight = int(float(attr_def['Weight']))
                    if stat_type == 'Average':
                        score = int(team.cumulative_value/team.num_occurs*weight)
                    else:
                        score = int(team.cumulative_value*weight)
                    # first occurrence of a team raises KeyError -> initialize
                    try:
                        team_rank_dict[team.team] += score
                    except:
                        team_rank_dict[team.team] = score
            else:
                # score by how many times the attribute took the given value
                team_rankings = DataModel.getTeamAttributesWithValue(session, comp, attr_name, attr_value, False)
                for team in team_rankings:
                    score = team.all_values.count(attr_value)
                    try:
                        team_rank_dict[team.team] += score
                    except:
                        team_rank_dict[team.team] = score

        # NOTE(review): this sorts ascending by score, so the emitted list is
        # lowest-score-first -- confirm whether consumers expect descending
        # (reverse=True) ordering here.
        sorted_team_rank = sorted(team_rank_dict.items(), key=operator.itemgetter(1))
        for team, score in sorted_team_rank:
            # round the score to an integer value
            score = float(int(score))
            if score > 0:
                thumbnails_snippet = ''
                if thumbnails:
                    thumbnails_snippet = ',\n' + get_team_scouting_thumbnails_json_snippet(global_config, comp, str(team))
                result.append( '   { "score": %0.1f, "competition": "%s", "team": %d%s }' % (score, comp, team, thumbnails_snippet))
                result.append(',\n')
                rank_added = True

    if rank_added == True:
        # drop the trailing ',\n' after the last rankings entry
        result = result[:-1]
    result.append(']}')
    json_str = ''.join(result)

    if store_json_file is True:
        # re-raise any storage failure to the caller
        try:
            if filter_name is None:
                file_name = 'scoutingrankings'
            else:
                file_name = 'scoutingrankings_%s' % filter_name
            FileSync.put( global_config, '%s/EventData/%s.json' % (comp,file_name), 'text', json_str)
        except:
            raise

    session.remove()
    return json_str