def get_team_score_json(global_config, name, comp, store_json_file=False):
    """Return a JSON string with team `name`'s score(s) at competition `comp`.

    The payload has the form { "competition": ..., "team": ..., "score": ... };
    "score" is a single value when exactly one score row exists, otherwise a
    list of per-row JSON objects. When store_json_file is True the payload is
    also mirrored to the event's TeamData directory via FileSync.
    """
    global_config['logger'].debug( 'GET Team %s Score For Competition %s', name, comp )

    season = WebCommonUtils.map_comp_to_season(comp)
    session = DbSession.open_db_session(global_config['db_name'] + season)
    try:
        result = []
        result.append('{ "competition" : "%s", "team" : "%s", ' % (comp,name))

        team_scores = DataModel.getTeamScore(session, name, comp)
        if len(team_scores) == 1:
            result.append('"score": "%s" }' % team_scores[0].score)
        else:
            result.append(' "score": [')
            for score in team_scores:
                result.append(score.json())
                result.append(',\n')
            # drop the trailing comma separator before closing the list
            if len(team_scores) > 0:
                result = result[:-1]
            result.append(']}')

        json_str = ''.join(result)

        if store_json_file is True:
            # FileSync errors propagate to the caller (the original wrapped
            # this in a no-op try/except that simply re-raised)
            FileSync.put( global_config, '%s/EventData/TeamData/team%s_scouting_score.json' % (comp,name), 'text', json_str)
    finally:
        # release the scoped DB session even when FileSync.put raises;
        # the original leaked it on that path
        session.remove()

    return json_str
def get_geo_location_json(include_teams=True, include_events=True):
    """Return a JSON mapping of event/team keys to stored geo coordinates.

    Event entries are keyed by event_key, team entries by "frc<number>".
    The payload (and a JS wrapper assigning it to a variable) is also
    written to the GlobalData directory; the generated filename reflects
    which record types were included.
    """
    geo_locations = {}
    filename = "geo_coordinates_for"

    my_config = ScoutingAppMainWebServer.global_config
    session = DbSession.open_db_session(my_config["db_name"] + my_config["this_season"])
    try:
        if include_events:
            filename += "_Events"
            for event in session.query(DataModel.EventInfo).all():
                # geo_location columns hold JSON text; skip rows with no data
                if event.geo_location is not None:
                    geo_locations[event.event_key] = json.loads(event.geo_location)

        if include_teams:
            filename += "_Teams"
            for team in session.query(DataModel.TeamInfo).all():
                if team.geo_location is not None:
                    team_key = "frc%d" % team.team
                    geo_locations[team_key] = json.loads(team.geo_location)
    finally:
        # the original never released the session; do so like the sibling helpers
        session.remove()

    geo_location_json = json.dumps(geo_locations)
    geo_location_js = "var %s = '%s';" % (filename, geo_location_json)

    # store the geo location information to a file, too; FileSync errors
    # propagate (the original try/except only re-raised)
    FileSync.put(my_config, "GlobalData/%s.json" % (filename), "text", geo_location_json)
    FileSync.put(my_config, "GlobalData/%s.js" % (filename), "text", geo_location_js)

    return geo_location_json
def get_team_scouting_notes_json(global_config, comp, name, store_json_file=False):
    """Return a JSON string with team `name`'s scouting notes at competition `comp`.

    Each note is emitted as a { "tag", "note" } pair in the "scouting_notes"
    list. When store_json_file is True the payload is also written to the
    event's TeamData directory.
    """
    global_config['logger'].debug( 'GET Team %s Scouting Notes For Competition %s', name, comp )

    season = WebCommonUtils.map_comp_to_season(comp)
    session = DbSession.open_db_session(global_config['db_name'] + season)
    try:
        result = []
        result.append('{ "competition" : "%s", "team" : "%s",\n' % (comp,name))
        result.append(' "scouting_notes" : [\n')

        team_notes = DataModel.getTeamNotes(session, name, comp)
        for note in team_notes:
            result.append(' { "tag": "%s", "note": "%s" }' % (note.tag,note.data))
            result.append(',\n')
        # drop the trailing comma separator before closing the list
        if len(team_notes) > 0:
            result = result[:-1]
        result.append(' ] }\n')

        json_str = ''.join(result)

        if store_json_file is True:
            # FileSync errors propagate (the original try/except only re-raised)
            FileSync.put( global_config, '%s/EventData/TeamData/team%s_scouting_notes.json' % (comp,name), 'text', json_str)
    finally:
        # release the scoped session even when FileSync.put raises
        session.remove()

    return json_str
def get_team_info_json(global_config, comp, name, store_json_file=False):
    """Return a JSON string describing team `name` (nickname, location, etc.).

    Returns '' when the team is not in the database. When store_json_file is
    True the payload (even an empty one, matching the original behavior) is
    written to the event's TeamData directory.
    """
    global_config['logger'].debug( 'GET Team %s Info', name )

    season = WebCommonUtils.map_comp_to_season(comp)
    session = DbSession.open_db_session(global_config['db_name'] + season)
    try:
        team_info = DataModel.getTeamInfo(session, int(name))
    finally:
        # the original never released the session; mirror the sibling helpers
        session.remove()

    if team_info is None:
        json_str = ''
    else:
        # emit the info as generic name/value pairs for the client to render
        fields = ( ('nickname', team_info.nickname),
                   ('affiliation', team_info.fullname),
                   ('location', team_info.location),
                   ('rookie_season', team_info.rookie_season),
                   ('website', team_info.website) )
        result = []
        result.append('{ "team": "%s", "team_data" : [\n' % name)
        result.append(',\n'.join([ ' { "name": "%s", "value": "%s" }' % pair for pair in fields ]))
        result.append('\n')
        result.append(' ] }\n')
        json_str = ''.join(result)

    if store_json_file is True:
        # FileSync errors propagate (the original try/except only re-raised)
        FileSync.put( global_config, '%s/EventData/TeamData/team%s_teaminfo.json' % (comp,name), 'text', json_str)

    return json_str
def get_team_participation_json():
    """Return JSON mapping "frc<team>" keys to first/last competed years.

    Teams with no recorded first_competed year are skipped. The payload
    (and a JS variable wrapper) is also written to the GlobalData directory.
    """
    team_participation = {}
    filename = "team_participation"

    my_config = ScoutingAppMainWebServer.global_config
    session = DbSession.open_db_session(my_config["db_name"] + my_config["this_season"])
    try:
        for team in session.query(DataModel.TeamInfo).all():
            team_key = "frc%d" % team.team
            if team.first_competed is not None:
                info = {}
                info["first_competed"] = team.first_competed
                info["last_competed"] = team.last_competed
                team_participation[team_key] = info
    finally:
        # the original never released the session; do so like the sibling helpers
        session.remove()

    team_participation_json = json.dumps(team_participation)
    team_participation_js = "var %s = '%s';" % (filename, team_participation_json)

    # store the team participation information to a file, too (the original
    # comment said "geo location" -- a copy/paste slip); FileSync errors
    # propagate (the original try/except only re-raised)
    FileSync.put(my_config, "GlobalData/%s.json" % (filename), "text", team_participation_json)
    FileSync.put(my_config, "GlobalData/%s.js" % (filename), "text", team_participation_js)

    return team_participation_json
def get_team_score_breakdown_json(global_config, name, comp=None, store_json_file=False):
    """Return a JSON breakdown of team `name`'s score, attribute by attribute.

    Each entry carries the attribute name, its raw score and its weighted
    score; attributes whose definition has a zero weight are skipped. comp
    defaults to the currently configured competition. When store_json_file
    is True the payload is also written to the event's TeamData directory.
    """
    global_config['logger'].debug( 'GET Team Score Breakdown: %s', name )

    if comp == None:
        comp = global_config['this_competition'] + global_config['this_season']
        season = global_config['this_season']
    else:
        season = WebCommonUtils.map_comp_to_season(comp)

    session = DbSession.open_db_session(global_config['db_name'] + season)
    try:
        attrdef_filename = WebCommonUtils.get_attrdef_filename(comp=comp)
        attr_definitions = AttributeDefinitions.AttrDefinitions(global_config)
        attr_definitions.parse(attrdef_filename)

        result = []
        result.append('{ "score_breakdown": [\n')

        entry_added = False
        team_attributes = DataModel.getTeamAttributesInOrder(session, name, comp)
        for attribute in team_attributes:
            attr_def = attr_definitions.get_definition( attribute.attr_name )
            if attr_def:
                # definitions without an explicit statistic type are totals
                try:
                    stat_type = attr_def['Statistic_Type']
                except KeyError:
                    stat_type = 'Total'
                weight = int(float(attr_def['Weight']))
                if weight != 0:
                    if stat_type == 'Average':
                        # guard against a zero occurrence count rather than
                        # crashing with a ZeroDivisionError
                        if attribute.num_occurs:
                            value = int(attribute.cumulative_value/attribute.num_occurs)
                        else:
                            value = 0
                    else:
                        value = int(attribute.cumulative_value)
                    data_str = '{"attr_name": "%s", "raw_score": %d, "weighted_score": %d}' % (attribute.attr_name,int(value),int(weight*value))
                    result.append(data_str)
                    result.append(',\n')
                    entry_added = True

        # drop the trailing comma separator. The original tested
        # len(team_attributes) > 0, which clipped the opening brace when
        # attributes existed but none produced an entry.
        if entry_added:
            result = result[:-1]
            result.append('\n')
        result.append(']}')

        json_str = ''.join(result)

        if store_json_file is True:
            # FileSync errors propagate (the original try/except only re-raised)
            FileSync.put( global_config, '%s/EventData/TeamData/team%s_scouting_scorebreakdown.json' % (comp,name), 'text', json_str)
    finally:
        # release the scoped session even when FileSync.put raises
        session.remove()

    return json_str
def get_team_scouting_mediafiles_json(global_config, comp, name, store_json_file=False):
    """Return JSON listing a team's scouting media files and their thumbnails.

    Scans the competition's ScoutingPictures directory for files named
    Team<name>_*.jpg or Team<name>_*.mp4, generates thumbnails for them,
    and lists both sets in the payload. When store_json_file is True the
    payload is also written to the event's TeamData directory.
    """
    global_config['logger'].debug( 'GET Team %s Scouting Mediafiles For Competition %s', name, comp )

    result = []
    result.append('{ "competition" : "%s", "team" : "%s",\n' % (comp,name))
    result.append(' "scouting_mediafiles" : [\n')

    input_dir = './static/data/' + comp + '/ScoutingPictures/'
    # The original pattern '...*.jpg|mp4' alternated over the WHOLE
    # expression, so it also matched any filename containing "mp4"; group
    # the extension alternation and escape the literal dot instead.
    pattern = 'Team' + name + '_' + r'[a-zA-Z0-9_]*\.(jpg|mp4)'
    mediafiles = get_datafiles(input_dir, re.compile(pattern), False, global_config['logger'])

    for filename in mediafiles:
        # list just the basename; clients resolve it against the media dir
        basefile = filename.split('/')[-1]
        result.append(' { "filename": "%s" }' % (basefile))
        result.append(',\n')
    # drop the trailing comma separator before closing the list
    if len(mediafiles) > 0:
        result = result[:-1]

    result.append(' ],\n')
    result.append(' "thumbnailfiles" : [\n')

    ImageFileUtils.create_thumbnails(mediafiles)

    thumbnail_dir = input_dir + "Thumbnails/"
    # thumbnails are prefixed with their pixel dimensions, e.g. "160x120_"
    pattern = '[0-9]*x[0-9]*_Team' + name + '_' + r'[a-zA-Z0-9_]*\.(jpg|mp4)'
    thumbnailfiles = get_datafiles(thumbnail_dir, re.compile(pattern), False, global_config['logger'])

    for filename in thumbnailfiles:
        basefile = filename.split('/')[-1]
        result.append(' { "filename": "%s" }' % (basefile))
        result.append(',\n')
    if len(thumbnailfiles) > 0:
        result = result[:-1]

    result.append(' ] }\n')
    json_str = ''.join(result)

    if store_json_file is True:
        # FileSync errors propagate (the original try/except only re-raised)
        FileSync.put( global_config, '%s/EventData/TeamData/team%s_scouting_mediafiles.json' % (comp,name), 'text', json_str)

    return json_str
def get_event_rank_list_json(global_config, year, event_code):
    """Return the ranking list for a FIRST event as a JSON string.

    Pulls live rankings from The Blue Alliance; on success the payload is
    cached to the event's ranklist.json. When TBA returns nothing, falls
    back to the cached file, and finally to an empty-rankings payload.
    """
    global_config['logger'].debug( 'GET Event Rank List Json' )

    # derive our competition name from the FIRST event code
    competition = WebCommonUtils.map_event_code_to_comp(year+event_code)

    store_data_to_file = False
    result = []
    rankings_data = get_event_data_from_tba( '%s%s/rankings' % (year,event_code.lower()) )

    result.append('{ "event" : "%s",\n' % (event_code.lower()))
    rankings = rankings_data.get('rankings', [])
    if len(rankings):
        # each ranking entry is a dict carrying at least 'rank' and 'team_key'
        result.append(' "last_updated": "%s",\n' % time.strftime('%c'))
        result.append(' "rankings" : [\n')
        for team_rank in rankings:
            result.append(' { "rank": %d, "team_number": %s, "status": "available" }' % (team_rank['rank'],team_rank['team_key'].replace('frc','')))
            result.append(',\n')
        # drop the trailing comma separator. The original tested len > 1,
        # which left a dangling comma for a single-entry ranking list.
        if len(rankings) > 0:
            result = result[:-1]
        result.append(' ]\n')
        store_data_to_file = True
    else:
        # we were not able to retrieve the data from FIRST, so let's return any stored file with the
        # information, otherwise we will return an empty json payload
        stored_file_data = FileSync.get( global_config, '%s/EventData/ranklist.json' % (competition) )
        if stored_file_data != '':
            return stored_file_data
        else:
            # no stored data either, so let's just return a formatted, but empty payload
            result.append(' "last_updated": "%s",\n' % time.strftime('%c'))
            result.append(' "rankings" : []\n')

    result.append(' }\n')
    json_str = ''.join(result)

    if store_data_to_file:
        # FileSync errors propagate (the original try/except only re-raised)
        FileSync.put( global_config, '%s/EventData/ranklist.json' % (competition), 'text', json_str)
    return json_str
def get_team_list_json(global_config, season, event, store_json_file=False):
    """Return the JSON team list for an event, sourced from The Blue Alliance.

    The list is fetched via get_team_list_json_from_tba(); when
    store_json_file is True it is also cached to <comp>/EventData/teams.json.
    """
    global_config['logger'].debug( 'GET Team List For Competition %s', event )

    comp = WebCommonUtils.map_event_code_to_comp(event, season)

    # A session is still opened (and released) to preserve any side effects
    # of initializing the per-season database, even though the team list now
    # comes from TBA. The original carried a large commented-out block that
    # built the list from the local DataModel; it has been removed here.
    session = DbSession.open_db_session(global_config['db_name'] + season)
    try:
        json_str = get_team_list_json_from_tba(global_config, comp)

        if store_json_file is True:
            # FileSync errors propagate (the original try/except only re-raised)
            FileSync.put( global_config, '%s/EventData/%s.json' % (comp,'teams'), 'text', json_str)
    finally:
        # release the scoped session even when FileSync.put raises
        session.remove()

    return json_str
def get_team_datafile_json(global_config, filename, store_json_file=False):
    """Return the contents of one scouting data file as JSON name/value pairs.

    `filename` has the form "<comp>/<Team...file>"; each "name:value" line
    of the file becomes one entry in the "scouting_data" list. When
    store_json_file is True the payload is also written to the event's
    TeamData directory.
    """
    global_config['logger'].debug( 'GET Team Data File Json: %s', filename )

    comp, fname = filename.split('/', 1)
    filepath = './static/data/' + comp + '/ScoutingData/' + fname
    # data files are named Team<number>_<...>.txt
    team = fname.split('_')[0].lstrip('Team')

    result = []
    result.append('{ "competition": "%s",\n' % comp)
    result.append(' "team": "%s",\n' % team)
    result.append(' "filename": "%s",\n' % fname)
    result.append(' "scouting_data": [\n')

    entry_added = False
    # use a context manager so the file handle is closed (the original
    # leaked it)
    with open( filepath, "r" ) as datafile:
        for line in datafile:
            line = line.rstrip('\n')
            try:
                name, value = line.split(':',1)
            except ValueError:
                # the original swallowed this and re-emitted the previous
                # line's name/value (or raised NameError on the first line);
                # skip malformed lines instead
                continue
            result.append(' { "name": "%s", "value": "%s" }' % (name,value))
            result.append(',\n')
            entry_added = True

    # drop the trailing comma separator. The original tested the last
    # readlines() chunk, which is always empty at EOF, so the comma was
    # never stripped.
    if entry_added:
        result = result[:-1]
    result.append('] }\n')

    json_str = ''.join(result)

    if store_json_file is True:
        # FileSync errors propagate (the original try/except only re-raised)
        short_fname = fname.replace('.txt','')
        FileSync.put( global_config, '%s/EventData/TeamData/team%s_scouting_file_%s.json' % (comp,team,short_fname), 'text', json_str)
    return json_str
def create_picklist_json(global_config, comp=None, store_json_file=False):
    """Build (and cache in the module-global local_picklist) the picklist JSON.

    Teams are listed in rank order with integer scores; zero-score teams
    are omitted. comp defaults to the currently configured competition.
    When store_json_file is True the payload is written to
    <comp>/EventData/picklist.json.
    """
    global_config['logger'].debug( 'Create Picklist Json' )

    global local_picklist

    if comp == None:
        comp = global_config['this_competition'] + global_config['this_season']
        season = global_config['this_season']
    else:
        season = WebCommonUtils.map_comp_to_season(comp)

    session = DbSession.open_db_session(global_config['db_name'] + season)
    try:
        result = []
        result.append('{ "picklist": [\n')

        local_picklist = DataModel.getTeamsInRankOrder(session, comp, True)
        rank = 1
        row_added = False
        for team in local_picklist:
            # round the score to an integer value
            team.score = float(int(team.score))
            if team.score > 0:
                row = '{ "rank" : %d, "team" : %d, "score" : %d, "competition" : "%s" }' % (rank, team.team, int(team.score), team.competition)
                result.append(row)
                result.append(',\n')
                row_added = True
                rank += 1

        # drop the trailing comma separator. The original tested
        # len(result) > 0, which is always true (the opening brace is
        # already in the list) and clipped it when no team had a positive
        # score, producing an invalid payload.
        if row_added:
            result = result[:-1]
        result.append(']}')

        json_str = ''.join(result)

        if store_json_file is True:
            # FileSync errors propagate (the original try/except only re-raised)
            FileSync.put( global_config, '%s/EventData/%s.json' % (comp,'picklist'), 'text', json_str)
    finally:
        # release the scoped session even when FileSync.put raises
        session.remove()

    return json_str
def get_saved_filter_json( global_config, filter_name, store_data_to_file=False ):
    """Return the saved attribute filter(s) as a JSON string.

    With a filter_name, returns just that filter (title-cased lookup); with
    None, returns all saved filters. Filters are lazily loaded from
    ./config/savedfilters.txt into the module-global saved_filters dict.
    When store_data_to_file is True the payload is written to the current
    competition's EventData directory.
    """
    global saved_filters

    competition = global_config['this_competition'] + global_config['this_season']

    if len(saved_filters) == 0:
        ConfigUtils.read_config( saved_filters, './config/savedfilters.txt' )

    result = []
    result.append('{ "filters": [\n')

    if filter_name != None:
        # default to an empty filter string; the original left filter_str
        # unbound for an unknown name and then crashed with a NameError
        filter_str = ''
        filter_name = filter_name.title()
        try:
            filter_str = saved_filters[filter_name]
        except KeyError:
            pass
        result.append(' { "name": "%s", "filter_str": "%s" }\n' % (filter_name,filter_str))
    else:
        for name, filter_str in saved_filters.iteritems():
            result.append(' { "name": "%s", "filter_str": "%s" }' % (name,filter_str))
            result.append(',\n')
        # drop the trailing comma separator
        if len(saved_filters) > 0:
            result = result[:-1]

    result.append('] }\n')
    json_str = ''.join(result)

    if store_data_to_file:
        # FileSync errors propagate (the original try/except only re-raised)
        if filter_name == None:
            file_name = 'attrfilters.json'
        else:
            file_name = 'attrfilter_%s.json' % filter_name
        FileSync.put( global_config, '%s/EventData/%s' % (competition,file_name), 'text', json_str)

    return json_str
def update_picklist_json(global_config, from_position, to_position, comp=None, store_json_file=True):
    """Move a picklist entry between 1-based positions and rebuild the JSON.

    Ensures the module-global local_picklist exists (creating it from the
    database when needed), reorders it, and re-emits the picklist payload,
    optionally persisting it to <comp>/EventData/picklist.json.
    """
    # the original logged 'Create Picklist Json' here -- a copy/paste slip
    global_config['logger'].debug( 'Update Picklist Json' )

    global local_picklist

    if local_picklist is None:
        # populate (and persist) the cached picklist on first use
        create_picklist_json(global_config, comp, store_json_file=True)

    result = []
    result.append('{ "picklist": [\n')

    if comp == None:
        comp = global_config['this_competition'] + global_config['this_season']

    # positions are 1-based; relocate the requested entry
    item_to_update = local_picklist.pop( from_position-1 )
    local_picklist.insert(to_position-1, item_to_update)

    rank = 1
    row_added = False
    for team in local_picklist:
        # round the score to an integer value
        team.score = float(int(team.score))
        if team.score > 0:
            row = '{ "rank" : %d, "team" : %d, "score" : %d, "competition" : "%s" }' % (rank, team.team, int(team.score), team.competition)
            result.append(row)
            result.append(',\n')
            row_added = True
            rank += 1

    # drop the trailing comma separator. The original tested
    # len(result) > 0, which is always true and clipped the opening brace
    # when the picklist had no positive-score entries.
    if row_added:
        result = result[:-1]
    result.append(']}')

    json_str = ''.join(result)

    if store_json_file is True:
        # FileSync errors propagate (the original try/except only re-raised)
        FileSync.put( global_config, '%s/EventData/%s.json' % (comp,'picklist'), 'text', json_str)
    return json_str
def get_team_scouting_datafiles_json(global_config, comp, name, store_json_file=False):
    """Return JSON listing a team's scouting data files for a competition.

    Scans <comp>/ScoutingData/ for files named Team<name>_*.txt. When
    store_json_file is True a per-file JSON payload is regenerated for each
    data file, and the file list itself is written to the event's TeamData
    directory.
    """
    global_config['logger'].debug( 'GET Team %s Scouting Datafiles For Competition %s', name, comp )

    result = []
    result.append('{ "competition" : "%s", "team" : "%s",\n' % (comp,name))
    result.append(' "scouting_datafiles" : [\n')

    input_dir = './static/data/' + comp + '/ScoutingData/'
    # escape the dot so the pattern matches a literal ".txt" extension
    # (the original unescaped '.' matched any character)
    pattern = 'Team' + name + '_' + r'[a-zA-Z0-9_]*\.txt'
    datafiles = get_datafiles(input_dir, re.compile(pattern), False, global_config['logger'])

    for filename in datafiles:
        basefile = filename.split('/')[-1]
        result.append(' { "filename": "%s" }' % (basefile))
        result.append(',\n')
        if store_json_file is True:
            # also regenerate the per-file scouting data payload
            get_team_datafile_json( global_config, comp + '/' + basefile, store_json_file )

    # drop the trailing comma separator before closing the list
    if len(datafiles) > 0:
        result = result[:-1]
    result.append(' ] }\n')

    json_str = ''.join(result)

    if store_json_file is True:
        # FileSync errors propagate (the original try/except only re-raised)
        FileSync.put( global_config, '%s/EventData/TeamData/team%s_scouting_datafiles.json' % (comp,name), 'text', json_str)
    return json_str
result.append(', ') if len(line) > 0: result = result[:-1] result.append(' ],\n') if len(rankings) > 1: result = result[:-1] result.append(' ]\n') store_data_to_file = True except Exception, err: print 'Caught exception:', err except: # we were not able to retrieve the data from FIRST, so let's return any stored file with the # information, otherwise we will return an empty json payload stored_file_data = FileSync.get( global_config, '%s/EventData/%s%s.json' % (competition,query_str,round_str) ) if stored_file_data != '': return stored_file_data result.append(' ] }\n') json_str = ''.join(result) if store_data_to_file: try: FileSync.put( global_config, '%s/EventData/%s%s.json' % (competition,query_str,round_str), 'text', json_str) except: raise return json_str def update_event_data_files( global_config, year, event, directory ):
def process_json_files(global_config, competition, output_file, input_dir, reprocess_files=False):
    """Process newly verified scouting-data JSON files for a competition.

    Each '.verified' file under input_dir is parsed; its attributes are
    stored to the per-season database, and optionally pushed to a Google
    sheet and/or an Excel (XLSX) workbook depending on the
    'google_sheets_intf' / 'excel_sheets_intf' settings in global_config.
    Handled files are copied to a '.processed' twin so they are skipped on
    the next run (unless reprocess_files is True, which re-runs everything).
    """
    # Initialize the database session connection
    db_name = global_config['db_name'] + global_config['this_season']
    session = DbSession.open_db_session(db_name)

    # get all the verified files from the input directory. These files are
    # candidates to be processed
    verified_files = FileSync.get_file_list(input_dir, ext='.verified', recurse=True )
    verified_files.sort(key=match_sort)

    # For the normal case, get all the processed files, too. We'll use the processed list to
    # determine which files are actually newly verified and need to be processed. If the
    # reprocess flag is true, then we'll process all verified files.
    if reprocess_files is not True:
        processed_files = FileSync.get_file_list(input_dir, ext='.processed', recurse=True )
        for processed_file in processed_files:
            verified_files.remove( processed_file.replace('processed','verified') )

    xlsx_workbook = None
    excel_intf_ctrl = global_config.get('excel_sheets_intf', 'Disabled')
    if excel_intf_ctrl == 'Enabled':
        # read in the output file, which is expected to be an XLSX file
        try:
            xlsx_workbook = openpyxl.load_workbook(output_file)
        except:
            print 'Error Reading Spreadsheet %s For Input' % output_file

    google_intf_ctrl = global_config.get('google_sheets_intf', 'Disabled')

    ''' # took out for now until we have local dictionary storage
    events = global_config.get('events')
    if events is None:
        events = {}
        global_config['events'] = events
    event_data = events.get( competition )
    if event_data is None:
        events[competition] = { 'ScoutingData': { 'TeamData': {} } }
        event_data = events[competition]
    event_scouting_data = event_data['ScoutingData']['TeamData']
    '''

    for verified_file in verified_files:
        filename = verified_file.split('/')[-1]

        # read the file into a dictionary
        with open(input_dir+verified_file) as fd:
            scouting_data = json.load(fd)

        # the filename prefix tells us which scouting category the data
        # belongs to, and which JSON section carries the team number
        if filename.startswith('Match'):
            team = scouting_data['Setup'].get('Team')
            category = 'Match'
        elif filename.startswith('Pit'):
            team = scouting_data['Pit'].get('Team')
            category = 'Pit'
        else:
            # NOTE(review): 'team' is not reset on this path, so an
            # 'Unknown' file reuses the team from a previous iteration (or
            # raises NameError if it is the very first file) -- confirm
            # whether that is intended
            category = 'Unknown'

        if team is not None and len(team) > 0:

            # ######################################################### #
            #
            # store the scouting data to the local database
            DataModel.addTeamToEvent(session, int(team), competition)

            attr_definitions = AttributeDefinitions.AttrDefinitions(global_config)

            for section_name, section_data in scouting_data.iteritems():
                if isinstance(section_data,dict):
                    for attr_name, attr_value in section_data.iteritems():
                        # use the attribute definitions to control whether information gets
                        # stored to the database rather than the hard coded stuff here.
                        # also need to consider the section/category name as the attributes
                        # and definitions are processed

                        # don't store the team number in the database
                        if attr_name == 'Team':
                            continue

                        # augment the attribute name with the section name in order to make the attribute
                        # unique
                        attr_name = '%s:%s' % (section_name, attr_name)

                        # build an ad hoc definition: numeric-looking values
                        # become weighted integers, anything else is an
                        # unweighted string; both are averaged over occurrences
                        attribute_def = {}
                        attribute_def['Name'] = attr_name
                        if attr_value.isdigit():
                            attribute_def['Type'] = 'Integer'
                            attribute_def['Weight'] = 1.0
                        else:
                            attribute_def['Type'] = 'String'
                            attribute_def['Weight'] = 0.0
                        attribute_def['Statistic_Type'] = 'Average'
                        attr_definitions.add_definition(attribute_def)

                        try:
                            DataModel.createOrUpdateAttribute(session, int(team), competition, category, attr_name, attr_value, attribute_def)
                        except Exception, exception:
                            # log the full traceback through the app logger
                            # and keep processing the remaining attributes
                            traceback.print_exc(file=sys.stdout)
                            exc_type, exc_value, exc_traceback = sys.exc_info()
                            exception_info = traceback.format_exception(exc_type, exc_value,exc_traceback)
                            for line in exception_info:
                                line = line.replace('\n','')
                                global_config['logger'].debug(line)
                else:
                    print 'Unexpected entry in scouting data file, name: %s, value: %s' % (section_name,section_data)

            # recompute and persist the team's aggregate score now that the
            # new attributes have been stored
            score = DataModel.calculateTeamScore(session, int(team), competition, attr_definitions)
            DataModel.setTeamScore(session, int(team), competition, score)
            session.commit()
            #
            # ######################################################### #
            #
            # Google spreadsheet update
            if google_intf_ctrl == 'Enabled':
                row_data = []
                for section_name, section_data in scouting_data.iteritems():
                    if isinstance(section_data,dict):
                        for attr_name, attr_value in section_data.iteritems():
                            # augment the attribute name with the section name in order to make the attribute
                            # unique
                            attr_name = '%s_%s' % (section_name, attr_name)
                            row_data.append((attr_name,attr_value))
                    else:
                        print 'Unexpected entry in scouting data file, name: %s, value: %s' % (section_name,section_data)

                sheet_name = '%s_%s_Data' % (competition,category)
                GoogleSheetsIntf.add_scouting_data_row( sheet_name, row_data )
            #
            # ######################################################### #
            #
            # ######################################################### #
            ''' # ######################################################### #
            #
            # store the scouting data to a local dictionary
            team_data = event_scouting_data.get(team)
            if team_data is None:
                team_data = { 'Summary': {}, 'MatchData': [] }
                event_scouting_data[team] = team_data

            team_match_data = team_data['MatchData']

            # if this match has already been scored, then update the data by removing the existing
            # match data and then add the updated data
            update_match = False
            for match_data in team_match_data:
                if scouting_data['Setup']['Match'] == match_data['Setup']['Match']:
                    update_match = True
                    break
            if update_match is True:
                team_match_data.remove(match_data)
            team_match_data.append(scouting_data)
            #
            # ######################################################### #
            '''
            # ######################################################### #
            #
            # store the match scouting data information to the spreadsheet
            if excel_intf_ctrl == 'Enabled' and xlsx_workbook is not None:
                if category == 'Match':
                    team_name = 'Team %s' % team
                    try:
                        team_sheet = xlsx_workbook.get_sheet_by_name(team_name)
                    except:
                        # no sheet exists for this team yet; create one
                        team_sheet = create_team_sheet( xlsx_workbook, team_name )

                    # cell B2 tracks how many matches have been recorded so far
                    curr_matches = team_sheet['B2'].value
                    if curr_matches is None:
                        curr_matches = 0

                    # get max row and column count and iterate over the sheet
                    max_row= team_sheet.max_row
                    for i in range(1,max_row+1):
                        # scan for a row that has Match in the first column to identify rows where data will be stored
                        cell_value = team_sheet.cell(row=i,column=1).value
                        if team_sheet.cell(row=i,column=1).value == 'Match':
                            attr_row = i
                            data_row = i+1
                            data_cell = team_sheet.cell(row=i+1,column=1).value
                            if data_cell is None:
                                # empty slot: write the new match data here
                                team_sheet = update_data_row( team_sheet, attr_row, data_row, scouting_data )
                                team_sheet['B2'].value = curr_matches+1
                                break
                            elif data_cell == int(scouting_data['Setup']['Match']):
                                # Update an existing row
                                team_sheet = update_data_row( team_sheet, attr_row, data_row, scouting_data )
                                break
                        # Jump over the next two rows
                        # NOTE(review): rebinding the loop variable has no
                        # effect in a Python for-loop; 'i' is reset by the
                        # iterator on the next pass -- confirm intended
                        i += 2
            #
            # ######################################################### #

        # mark the file as handled so it is skipped on the next pass
        shutil.copyfile(input_dir+verified_file, input_dir+verified_file.replace('verified','processed'))

    if xlsx_workbook is not None:
        xlsx_workbook.save(output_file)
def get_team_attr_rankings_json(global_config, comp=None, attr_name=None):
    """Return team rankings for one attribute as a DataTables-style JSON payload.

    The payload carries a "columns" list (Team plus, for Map_Integer
    attributes, one column per mapped value name) and a "rankings" list of
    per-team rows. comp defaults to the currently configured competition.
    """
    global_config['logger'].debug( 'GET Team Attribute Rankings Json' )

    # NOTE(review): this flag is never set to True, so the FileSync.put
    # branch at the bottom is currently dead code -- confirm whether
    # persisting the payload was meant to be enabled
    store_data_to_file = False

    if comp == None:
        comp = global_config['this_competition'] + global_config['this_season']
        season = global_config['this_season']
    else:
        season = WebCommonUtils.map_comp_to_season(comp)

    session = DbSession.open_db_session(global_config['db_name'] + season)

    attrdef_filename = WebCommonUtils.get_attrdef_filename(comp=comp)
    attr_definitions = AttributeDefinitions.AttrDefinitions()
    attr_definitions.parse(attrdef_filename)

    attr_def = attr_definitions.get_definition(attr_name)
    # definitions without an explicit statistic type are treated as totals
    try:
        stat_type = attr_def['Statistic_Type']
    except:
        stat_type = 'Total'

    # set directly because this helper is invoked from a web.py handler
    web.header('Content-Type', 'application/json')

    result = []
    result.append('{ "attr_name" : "%s",\n' % attr_name)

    # add the columns based on the attribute definition type
    result.append(' "columns" : [\n')
    result.append(' { "sTitle": "Team" }')
    result.append(',\n')
    columns = []
    if attr_def['Type'] == 'Map_Integer':
        # Map_Values looks like 'name1=val1:name2=val2:...'; emit one
        # ranking column per mapped name
        map_values = attr_def['Map_Values'].split(':')
        for map_value in map_values:
            item_name = map_value.split('=')[0]
            columns.append(item_name)
            result.append(' { "sTitle": "%s" }' % item_name)
            result.append(',\n')
    # drop the trailing comma separator before closing the column list
    result = result[:-1]
    result.append('\n')
    result.append(' ],\n')

    if stat_type == 'Average':
        team_rankings = DataModel.getTeamAttributesInAverageRankOrder(session, comp, attr_name)
    else:
        team_rankings = DataModel.getTeamAttributesInRankOrder(session, comp, attr_name)

    result.append(' "rankings" : [\n')
    for team_attr in team_rankings:
        # each row is [ team, <value for each mapped column> ]; columns
        # missing from the team's value map default to 0
        data_str = ' [ %d,' % team_attr.team
        value_dict = DataModel.mapAllValuesToDict(attr_def, team_attr.all_values)
        for column in columns:
            try:
                value = value_dict[column]
            except:
                value = 0
            data_str += ' %d,' % value
        data_str = data_str.rstrip(',')
        data_str += ' ]'
        result.append(data_str)
        result.append(',\n')
    # drop the trailing comma separator before closing the rankings list
    if len(team_rankings) > 0:
        result = result[:-1]
        result.append('\n')
    result.append(' ]\n}')

    json_str = ''.join(result)

    if store_data_to_file is True:
        try:
            file_name = 'attrrankings_%s' % attr_name
            FileSync.put( global_config, '%s/EventData/%s.json' % (comp,file_name), 'text', json_str)
        except:
            raise

    session.remove()

    return json_str
def get_team_rankings_json(global_config, season, event, attr_filters=None, filter_name=None, thumbnails=False, store_json_file=False):
    """Return team rankings for an event as a JSON string.

    With no attr_filters the overall team scores are used; otherwise each
    filter ('name' or 'name=value') contributes a weighted per-attribute
    score that is summed per team. Zero-score teams are omitted. When
    store_json_file is True the payload is written to the event's
    EventData directory (filename derived from filter_name).
    """
    global_config['logger'].debug( 'GET Team Rankings Json' )

    # avoid the original's mutable default argument; None means "no filters"
    if attr_filters is None:
        attr_filters = []

    comp = WebCommonUtils.map_event_code_to_comp(event, season)
    session = DbSession.open_db_session(global_config['db_name'] + season)
    try:
        result = []
        result.append('{ "rankings": [\n')

        rank_added = False
        if len(attr_filters) == 0:
            team_rankings = DataModel.getTeamsInRankOrder(session, comp, False)
            for team in team_rankings:
                # round the score to an integer value
                team.score = float(int(team.score))
                if team.score > 0:
                    thumbnails_snippet = ''
                    if thumbnails:
                        thumbnails_snippet = ',\n' + get_team_scouting_thumbnails_json_snippet(global_config, comp, str(team.team))
                    result.append( ' { "score": %0.1f, "competition": "%s", "team": %d%s }' % (team.score, comp, team.team,thumbnails_snippet))
                    result.append(',\n')
                    rank_added = True
        else:
            # we'll need the attribute definitions in order to retrieve the correct
            # attribute value and attribute weighting
            attrdef_filename = WebCommonUtils.get_attrdef_filename(comp=comp)
            attr_definitions = AttributeDefinitions.AttrDefinitions(global_config)
            attr_definitions.parse(attrdef_filename)

            team_rank_dict = dict()
            for attr_filter in attr_filters:
                # filters are either 'attr_name' or 'attr_name=value'
                try:
                    attr_name, attr_value = attr_filter.split('=')
                except ValueError:
                    attr_name = attr_filter
                    attr_value = None

                attr_def = attr_definitions.get_definition(attr_name)
                if attr_value is None:
                    # no value specified: accumulate the weighted attribute score
                    team_rankings = DataModel.getTeamAttributesInRankOrder(session, comp, attr_name, False)
                    for team in team_rankings:
                        try:
                            stat_type = attr_def['Statistic_Type']
                        except KeyError:
                            stat_type = 'Total'
                        weight = int(float(attr_def['Weight']))
                        if stat_type == 'Average':
                            score = int(team.cumulative_value/team.num_occurs*weight)
                        else:
                            score = int(team.cumulative_value*weight)
                        try:
                            team_rank_dict[team.team] += score
                        except KeyError:
                            team_rank_dict[team.team] = score
                else:
                    # value specified: count how often the team recorded it
                    team_rankings = DataModel.getTeamAttributesWithValue(session, comp, attr_name, attr_value, False)
                    for team in team_rankings:
                        score = team.all_values.count(attr_value)
                        try:
                            team_rank_dict[team.team] += score
                        except KeyError:
                            team_rank_dict[team.team] = score

            sorted_team_rank = sorted(team_rank_dict.items(), key=operator.itemgetter(1))
            for team, score in sorted_team_rank:
                # round the score to an integer value
                score = float(int(score))
                if score > 0:
                    thumbnails_snippet = ''
                    if thumbnails:
                        thumbnails_snippet = ',\n' + get_team_scouting_thumbnails_json_snippet(global_config, comp, str(team))
                    result.append( ' { "score": %0.1f, "competition": "%s", "team": %d%s }' % (score, comp, team, thumbnails_snippet))
                    result.append(',\n')
                    rank_added = True

        # drop the trailing comma separator before closing the list
        if rank_added:
            result = result[:-1]
        result.append(']}')

        json_str = ''.join(result)

        if store_json_file is True:
            # FileSync errors propagate (the original try/except only re-raised)
            if filter_name is None:
                file_name = 'scoutingrankings'
            else:
                file_name = 'scoutingrankings_%s' % filter_name
            FileSync.put( global_config, '%s/EventData/%s.json' % (comp,file_name), 'text', json_str)
    finally:
        # release the scoped session even when FileSync.put raises
        session.remove()

    return json_str
def get_attr_tree_json(global_config, filter_name = None, store_data_to_file=False):
    """Return the attribute-definition tree as JSON for the tree widget.

    Attributes are grouped by category under an 'All Attributes' root; each
    node carries a "checked" flag driven by the saved filter named
    filter_name (every node starts checked when no filter is given). When
    store_data_to_file is True the payload is written to the current
    competition's EventData directory.
    """
    global_config['logger'].debug( 'GET Attribute Definitions Tree JSON' )

    attrdef_filename = './config/' + global_config['attr_definitions']
    attr_definitions = AttributeDefinitions.AttrDefinitions(global_config)
    attr_definitions.parse(attrdef_filename)

    competition = global_config['this_competition'] + global_config['this_season']

    #categories = attr_definitions.get_sub_categories()
    attr_filter = get_saved_filter(filter_name)

    result = []
    result.append('{ "item": [\n')

    # with a named filter only its attributes start checked; with no filter
    # everything starts checked
    if filter_name != None:
        checked = 0
    else:
        checked = 1
    opened = 1
    result.append(' { "text": "%s", "open": %d, "checked": %d, "id": "Skip_%s", "item": [ \n' % ('All Attributes',opened,checked,'All'))

    category_dict = attr_definitions.get_attr_dict_by_category()
    for category, attrlist in category_dict.iteritems():
        if category != 'Uncategorized':
            result.append(' { "text": "%s", "checked": %d, "id": "Skip_%s", "item": [ \n' % (category,checked,category))
            for attrname in sorted(attrlist):
                checked_ind = 0
                if filter_name is None:
                    # if there is no specified filter, then set the checked indicator based on the overall setting
                    checked_ind = checked
                else:
                    # otherwise, if a filter is specified, then set the checked indicator based on if the attribute
                    # name is specified in the filter list
                    if attrname in attr_filter:
                        checked_ind = 1
                tree_item_str = get_attr_def_item_json( global_config, attr_definitions.get_definition(attrname), attr_filter, checked_ind )
                result.append(tree_item_str)
                result.append(',\n')
            # drop the trailing comma separator within this category
            if len(attrlist) > 0:
                result = result[:-1]
                result.append('\n')
            result.append( ' ] }')
            result.append(',\n')

    # uncategorized attributes hang directly off the root node
    # NOTE(review): this raises KeyError when category_dict has no
    # 'Uncategorized' bucket -- confirm the parser always creates one
    attrlist = category_dict['Uncategorized']
    if len( attrlist ) > 0:
        for attrname in sorted(attrlist):
            checked_ind = 0
            if filter_name is None:
                # if there is no specified filter, then set the checked indicator based on the overall setting
                checked_ind = checked
            else:
                # otherwise, if a filter is specified, then set the checked indicator based on if the attribute
                # name is specified in the filter list
                if attrname in attr_filter:
                    checked_ind = 1
            tree_item_str = get_attr_def_item_json( global_config, attr_definitions.get_definition(attrname), attr_filter, checked_ind )
            result.append(tree_item_str)
            result.append(',\n')

    # drop the trailing comma left by whichever loop appended last
    result = result[:-1]
    result.append('],\n "id": 1 \n}\n')
    result.append('],\n "id": 0 \n}\n')

    json_str = ''.join(result)

    if store_data_to_file:
        try:
            if filter_name == None:
                file_name = 'attrtree.json'
            else:
                file_name = 'attrtree_%s.json' % filter_name
            FileSync.put( global_config, '%s/EventData/%s' % (competition,file_name), 'text', json_str)
        except:
            raise

    return json_str
def PUT(self, request_path):
    """Store the body of an HTTP PUT at *request_path* via FileSync."""
    mime_type = web.ctx.env['CONTENT_TYPE']
    payload = web.data()
    FileSync.put(global_config, request_path, mime_type, payload)
def processClientConnection( self, client_sock, client_info ): print "Accepted connection from ", client_info files_received = 0 try: while True: msg_header, msg_body, content_type = self.read_request( client_sock ) if len(msg_header) == 0: break print "Message Header: %s" % msg_header print "Message Body Length: %d" % len(msg_body) msg_header_lines = msg_header.splitlines() request_type, request_path = msg_header_lines[0].split(' ',1) print "Request Type: %s" % request_type print "Request Path: %s" % request_path request_complete = False # retrieve any params attached to the requested entity params_offset = request_path.find('?') if params_offset != -1: request_params = request_path[params_offset:] request_params = request_params.lstrip('?') request_path = request_path[0:params_offset] request_path = request_path.lstrip('/') # if the requested path starts with 'static', then let's assume that # the request knows the full path that it's looking for, otherwise, # we will prepend the path with the path to the data directory if request_path.startswith('static'): fullpath = './' + request_path else: fullpath = './static/data/' + request_path if request_type == "PUT": # make sure that the destination directory exists if not os.path.exists(os.path.dirname(fullpath)): os.makedirs(os.path.dirname(fullpath)) response_code = FileSync.put_file(fullpath, content_type, msg_body) client_sock.send('HTTP/1.1 ' + response_code + '\r\n') files_received += 1 elif request_type == "POST": response_code = "400 Bad Request" path_elems = request_path.split('/') if len(path_elems) >= 2: comp_season_list = WebCommonUtils.split_comp_str(path_elems[0]) if comp_season_list != None: result = False error = False # for the sync of event and team data, the URI path is of the following # format /Sync/<comp>/EventData/[TeamData/]. 
if just EventData is provided, # then the event data is regenerated, if TeamData is provided, then both # the event data and team data is regenerated if len(path_elems) >= 2 and path_elems[1] == 'EventData': result = WebEventData.update_event_data_files( self.global_config, comp_season_list[1], comp_season_list[0], path_elems[1] ) if result == True: result = WebTeamData.update_team_event_files( self.global_config, comp_season_list[1], comp_season_list[0], path_elems[1] ) if result == True: result = WebAttributeDefinitions.update_event_data_files( self.global_config, path_elems[1] ) if result == False: error = True if len(path_elems) >= 3 and path_elems[2] == 'TeamData' and error is False: try: team = path_elems[3] if team == '': team = None except: team = None result = WebTeamData.update_team_data_files( self.global_config, comp_season_list[1], comp_season_list[0], path_elems[2], team ) if result == True: response_code = "200 OK" client_sock.send('HTTP/1.1 ' + response_code + '\r\n') elif request_type == "GET": # Parse any params attached to this GET request params = request_params.split(';') for param in params: # split the parameter into the tag and value parsed_param = param.split('=') tag = parsed_param[0] value = parsed_param[1] # process the parameter # check to see if the requested path exists. We may need to handle that # condition separately, treating non-existent directories as empty (as # opposed to sending a 404 not found. # TODO: update the client side to handle the 404 not found as an empty directory # and then update this block to send the 404 in all cases. 
if not os.path.exists(fullpath): if request_path[-1] == '/': # if the requested path refers to a directory, let's return an empty # response indicating that there are no files in that directory client_sock.send('HTTP/1.1 ' + '200 OK' + '\r\n') client_sock.send('Content-Length: 0\r\n') client_sock.send('\r\n\r\n') else: client_sock.send('HTTP/1.1 ' + '404 Not Found' + '\r\n\r\n') request_complete = True if not request_complete: if os.path.isdir(fullpath): file_list = FileSync.get_file_list(fullpath) response_body = '' for file_name in file_list: response_body += file_name + '\n' client_sock.send('HTTP/1.1 ' + '200 OK' + '\r\n') client_sock.send('Content-Length: %d\r\n' % len(response_body)) client_sock.send('\r\n') client_sock.send(response_body + '\r\n') else: response_body = FileSync.get_file(fullpath) if response_body != '': client_sock.send('HTTP/1.1 ' + '200 OK' + '\r\n') client_sock.send('Content-Length: %d\r\n' % len(response_body)) client_sock.send('\r\n') client_sock.send(response_body + '\r\n') else: client_sock.send('HTTP/1.1 ' + '404 Not Found' + '\r\n\r\n') print "Request Complete\n" except IOError: pass print "disconnected" client_sock.close()
def get_event_matchresults_json(global_config, year, event_code, round_str, team_str = None):
    """Return one round's match results for an event as a JSON string.

    year      -- season string, e.g. '2016'
    round_str -- 'qual', 'quarters', 'semis' or 'finals'
    team_str  -- optional team number; restricts to that team's matches

    Data comes from The Blue Alliance; on success it is cached through
    FileSync, and when TBA returns nothing the cached copy (if any) is
    returned instead.
    """
    global_config['logger'].debug( 'GET Event Results Json' )

    # BUGFIX: the original if/elif chain left match_selector unbound for an
    # unrecognized round_str, raising NameError in the loop below; an unknown
    # round now simply matches no rows.
    match_selector = { 'qual': 'qm', 'quarters': 'qf', 'semis': 'sf', 'finals': 'f' }.get(round_str)

    # derive our competition name from the FIRST event code
    competition = WebCommonUtils.map_event_code_to_comp(year+event_code)
    store_data_to_file = False
    result = []
    result.append('{ "event" : "%s",\n' % (event_code.lower()))

    if team_str is None:
        exp_filename = ''
        event_matches = get_event_data_from_tba( '%s%s/matches' % (year,event_code.lower()), api_version='v3' )
    else:
        exp_filename = '_%s' % team_str
        event_matches = WebTeamData.get_team_data_from_tba( team_str, 'event/%s%s/matches' % (year,event_code.lower()) )

    if len(event_matches):
        result.append(' "last_updated": "%s",\n' % time.strftime('%c'))
        headings = [ 'Match', 'Start Time', 'Red 1', 'Red 2', 'Red 3', 'Blue 1', 'Blue 2', 'Blue 3', 'Red Score', 'Blue Score' ]
        result.append(' "columns" : [\n')
        for heading in headings:
            result.append(' { "sTitle": "%s" }' % heading)
            result.append(',\n')
        if len(headings)>0:
            result = result[:-1]
        result.append(' ],\n')
        result.append(' "matchresults" : [\n')

        # TBA returns the entire match set; keep only the requested round
        for match in event_matches:
            if str(match['comp_level']) == match_selector:
                result.append(' [ ')
                # Match number: plain text when links are disabled, hyperlink
                # otherwise.  The raise-into-except shape preserves the
                # original control flow for "config key missing or != 'Yes'".
                try:
                    if global_config['json_no_links'] == 'Yes':
                        result.append( '"%s", ' % str(match['match_number']) )
                    else:
                        raise KeyError('links enabled')
                except Exception:
                    result.append( '"%s", ' % get_match_hyperlink(competition, match) )

                # Match start time.  NOTE(review): assumes match['time'] is a
                # numeric epoch; TBA may report null for unscheduled matches —
                # confirm upstream guarantees.
                match_epoch_time = int(match['time'])
                time_format_str = '%a %b %d - %I:%M %p'
                match_time_str = datetime.datetime.fromtimestamp(match_epoch_time).strftime(time_format_str)
                result.append( '"%s", ' % match_time_str )

                # Alliance teams: red then blue, three slots each
                try:
                    if global_config['json_no_links'] == 'Yes':
                        for color in ('red', 'blue'):
                            for slot in range(3):
                                result.append( '"%s", ' % str(match['alliances'][color]['team_keys'][slot]).lstrip('frc') )
                    else:
                        raise KeyError('links enabled')
                except Exception:
                    for color in ('red', 'blue'):
                        for slot in range(3):
                            result.append( '"%s", ' % get_team_hyperlink( competition, str(match['alliances'][color]['team_keys'][slot]).lstrip('frc') ) )

                # Alliance scores: TBA reports -1 for an unplayed match
                score = str(match['alliances']['red']['score'])
                if score == '-1':
                    score = '-'
                result.append( '"%s", ' % score )
                score = str(match['alliances']['blue']['score'])
                if score == '-1':
                    score = '-'
                result.append( '"%s" ' % score )
                result.append(' ],\n')
                store_data_to_file = True

        if store_data_to_file is True:
            # replace the last row's comma'd terminator, then close the array
            result = result[:-1]
            result.append(' ]\n')
            result.append(' ]\n')
    else:
        # we were not able to retrieve the data from FIRST, so return any
        # stored copy of the information instead of an empty payload
        stored_file_data = FileSync.get( global_config, '%s/EventData/matchresults_%s%s.json' % (competition,round_str,exp_filename) )
        if stored_file_data != '':
            return stored_file_data

    result.append(' }\n')
    json_str = ''.join(result)
    if store_data_to_file:
        FileSync.put( global_config, '%s/EventData/matchresults_%s%s.json' % (competition,round_str,exp_filename), 'text', json_str)
    return json_str
#!/usr/bin/env jython import java.io.File import FileSync print "Creating /dev/shm/fstab if not exists."; md5 = FileSync.md5sum( java.io.File( "/etc/fstab" ) ); FileSync.RecvFile( java.io.File( "/dev/shm" ), "fstab", "file:///etc/fstab", md5 );
def get_event_standings_json(global_config, year, event_code):
    """Return the event rankings as a JSON string.

    Data comes from The Blue Alliance; on success it is cached through
    FileSync, and when TBA returns nothing the cached copy (or an empty
    formatted payload) is returned instead.
    """
    global_config['logger'].debug( 'GET Event Rankings Json' )
    # derive our competition name from the FIRST event code
    competition = WebCommonUtils.map_event_code_to_comp(year+event_code)
    store_data_to_file = False
    result = []
    tba_data = get_event_data_from_tba( '%s%s/rankings' % (year,event_code.lower()) )
    result.append('{ "event" : "%s",\n' % (event_code.lower()))

    if tba_data:
        rankings = tba_data.get('rankings')
        if rankings is not None:
            result.append(' "last_updated": "%s",\n' % time.strftime('%c'))
            headings = [ 'Rank', 'Team', 'Record', 'Matches_Played', 'Dq' ]
            result.append(' "columns" : [\n')
            for heading in headings:
                result.append(' { "sTitle": "%s" }' % heading)
                result.append(',\n')
            if len(headings)>0:
                result = result[:-1]
            result.append(' ],\n')
            result.append(' "rankings" : [\n')
            for line in rankings:
                result.append(' [ ')
                for item in headings:
                    key = item.lower()
                    if key == 'record':
                        # W-L-T record is a nested dict in the TBA payload
                        result.append('"%s-%s-%s"' % (str(line[key]['wins']),str(line[key]['losses']),str(line[key]['ties'])))
                    elif key == 'team':
                        # render the team as a link into this app's team page
                        team_str = line['team_key'].replace('frc','')
                        result.append(('"<a href=\\"/teamdata/%s/'% competition)+team_str+'\\">'+team_str+'</a>"')
                    else:
                        result.append('"%s"' % (str(line[key])))
                    result.append(', ')
                if len(line) > 0:
                    result = result[:-1]
                result.append(' ],\n')
            # BUGFIX: was `len(rankings) > 1`, which left a trailing comma
            # (invalid JSON) when exactly one ranking row was returned.
            if len(rankings) > 0:
                result = result[:-1]
                result.append(' ]\n')
            store_data_to_file = True
            result.append(' ]\n')
    else:
        # we were not able to retrieve the data from FIRST, so let's return any
        # stored file with the information
        stored_file_data = FileSync.get( global_config, '%s/EventData/rankings.json' % (competition) )
        if stored_file_data != '':
            return stored_file_data
        else:
            # no stored data either, so just return a formatted, empty payload
            result.append(' "last_updated": "%s",\n' % time.strftime('%c'))
            result.append(' "columns" : [],\n')
            result.append(' "rankings" : []\n')

    result.append(' }\n')
    json_str = ''.join(result)
    if store_data_to_file:
        FileSync.put( global_config, '%s/EventData/rankings.json' % (competition), 'text', json_str)
    return json_str
def get_event_stats_json(global_config, year, event_code, stat_type):
    """Return event statistics (e.g. 'oprs', 'dprs', 'ccwms') as a JSON string.

    stat_type selects the key in TBA's /oprs payload.  On success the JSON is
    cached through FileSync; when TBA returns nothing the cached copy (if any)
    is returned instead.
    """
    global_config['logger'].debug( 'GET Event Results Json' )
    # derive our competition name from the FIRST event code
    competition = WebCommonUtils.map_event_code_to_comp(year+event_code)
    store_data_to_file = False
    result = []
    result.append('{ "event" : "%s",\n' % (event_code.lower()))
    event_stats = get_event_data_from_tba( '%s%s/oprs' % (year,event_code.lower()) )

    if len(event_stats):
        result.append(' "last_updated": "%s",\n' % time.strftime('%c'))
        # singular, upper-cased column title: 'oprs' -> 'OPR'
        headings = [ 'Team', stat_type.rstrip('s').upper() ]
        result.append(' "columns" : [\n')
        for heading in headings:
            result.append(' { "sTitle": "%s" }' % heading)
            result.append(',\n')
        if len(headings)>0:
            result = result[:-1]
        result.append(' ],\n')
        result.append(' "stats" : [\n')
        try:
            stats_dict = event_stats[stat_type]
            for key, value in stats_dict.iteritems():
                result.append( ' ["%s", %.2f' % (get_team_hyperlink( competition, key.lstrip('frc') ),value) )
                result.append(' ],\n')
                store_data_to_file = True
            if store_data_to_file is True:
                # replace the last row's comma'd terminator
                result = result[:-1]
                result.append(' ]\n')
        # BUGFIX: narrowed from a bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit); a missing stat_type key or malformed
        # value is logged and yields an empty stats array.
        except Exception:
            global_config['logger'].debug( 'No Statistics Data For %s' % stat_type )
        result.append(' ]\n')
    else:
        # we were not able to retrieve the data from FIRST, so return any
        # stored copy of the information instead of an empty payload
        stored_file_data = FileSync.get( global_config, '%s/EventData/eventstats_%s.json' % (competition,stat_type) )
        if stored_file_data != '':
            return stored_file_data

    result.append(' }\n')
    json_str = ''.join(result)
    if store_data_to_file:
        FileSync.put( global_config, '%s/EventData/eventstats_%s.json' % (competition,stat_type), 'text', json_str)
    return json_str
def GET(self, request_path):
    """Return the contents stored at *request_path* from the synced file store."""
    return FileSync.get(global_config, request_path)
def get_event_matchschedule_json(global_config, year, event_code, team_str = None):
    """Return the event's match schedule, bucketed by round, as a JSON string.

    team_str restricts the schedule to one team's matches.  On success the
    JSON is cached through FileSync; when TBA returns nothing the cached copy
    (if any) is returned instead.
    """
    global_config['logger'].debug( 'GET Event Schedule Json' )
    # derive our competition name from the FIRST event code
    competition = WebCommonUtils.map_event_code_to_comp(year+event_code)
    store_data_to_file = False

    if team_str is None:
        exp_filename = ''
        event_matches = get_event_data_from_tba( '%s%s/matches' % (year,event_code.lower()) )
    else:
        exp_filename = '_%s' % team_str
        event_matches = WebTeamData.get_team_data_from_tba( team_str, 'event/%s%s/matches' % (year,event_code.lower()) )

    match_schedule = {
        'event': event_code.lower(),
        'columns': [ 'Round', 'Match', 'Red_1', 'Red_2', 'Red_3', 'Blue_1', 'Blue_2', 'Blue_3' ],
        'qualification': [],
        'quarter_finals': [],
        'semi_finals': [],
        'finals': [],
    }
    # comp_level code -> schedule bucket
    bucket_for_level = { 'qm': 'qualification', 'qf': 'quarter_finals', 'sf': 'semi_finals', 'f': 'finals' }

    if len(event_matches):
        for match in event_matches:
            comp_level = match['comp_level']
            # elimination rounds are labeled "set-match", quals by number only
            if comp_level in ('qf', 'sf'):
                match_label = '%s-%s' % (match['set_number'],match['match_number'])
            else:
                match_label = str(match['match_number'])

            row = [ comp_level, match_label ]
            for alliance in ('red', 'blue'):
                keys = match['alliances'][alliance]['team_keys']
                for slot in range(3):
                    row.append(keys[slot].lstrip('frc'))

            bucket = bucket_for_level.get(comp_level)
            if bucket is not None:
                match_schedule[bucket].append(row)
            store_data_to_file = True

        # qualification matches must be ordered by match number (row[1])
        match_schedule['qualification'].sort(key=lambda row: int(row[1]))
    else:
        # nothing retrieved from TBA; fall back to the stored copy if present
        stored_file_data = FileSync.get( global_config, '%s/EventData/matchschedule%s.json' % (competition,exp_filename) )
        if stored_file_data != '':
            return stored_file_data

    json_str = json.dumps(match_schedule)
    if store_data_to_file:
        FileSync.put( global_config, '%s/EventData/matchschedule%s.json' % (competition,exp_filename), 'text', json_str)
    return json_str
def run(self): server_sock=BluetoothSocket( RFCOMM ) server_sock.bind(("",PORT_ANY)) server_sock.listen(1) port = server_sock.getsockname()[1] uuid = "00001073-0000-1000-8000-00805F9B34F7" advertise_service( server_sock, "TTTService", service_id = uuid, service_classes = [ uuid, SERIAL_PORT_CLASS ], profiles = [ SERIAL_PORT_PROFILE ] ) while not self.shutdown: print "Waiting for connection on RFCOMM channel %d" % port client_sock, client_info = server_sock.accept() print "Accepted connection from ", client_info files_received = 0 try: while True: msg_header, msg_body, content_type = self.read_request( client_sock ) if len(msg_header) == 0: break print "Message Header: %s" % msg_header print "Message Body Length: %d" % len(msg_body) msg_header_lines = msg_header.splitlines() request_type, request_path = msg_header_lines[0].split(' ',1) print "Request Type: %s" % request_type print "Request Path: %s" % request_path if request_type == "PUT": fullpath = './static/data/' + request_path response_code = FileSync.put_file(fullpath, content_type, msg_body) client_sock.send('HTTP/1.1 ' + response_code + '\r\n') files_received += 1 elif request_type == "GET": fullpath = './static/data/' + request_path if os.path.isdir(fullpath): file_list = FileSync.get_file_list(fullpath) response_body = '' for file_name in file_list: response_body += file_name + '\n' client_sock.send('HTTP/1.1 ' + '200 OK' + '\r\n') client_sock.send('Content-Length: %d\r\n' % len(response_body)) client_sock.send('\r\n') client_sock.send(response_body + '\r\n') else: response_body = FileSync.get_file(fullpath) if response_body != '': client_sock.send('HTTP/1.1 ' + '200 OK' + '\r\n') client_sock.send('Content-Length: %d\r\n' % len(response_body)) client_sock.send('\r\n') client_sock.send(response_body + '\r\n') else: client_sock.send('HTTP/1.1 ' + '404 Not Found' + '\r\n\r\n') print "Request Complete\n" except IOError: pass print "disconnected" client_sock.close() server_sock.close() print "Bluetooth Sync 
Server Terminated"
# NOTE(review): orphaned fragment of another sync-server receive loop.  It
# begins mid-loop (it references num_msg_parts, msg_header and client_sock
# defined above the visible span) and is truncated after the dangling
# "if response_body != '':".  Left byte-identical — it cannot be safely
# reconstructed from this view; locate the full original before editing.
# Presumably a sibling of the Bluetooth run() loop above (it uses bare '\n'
# instead of '\r\n' line endings) — TODO confirm against the source repo.
print "Num Msg Parts: %d" % num_msg_parts break print "Message Header: %s" % msg_header print "Message Body: %s" % msg_body print "Message Body Length: %d" % len(msg_body) msg_header_lines = msg_header.splitlines() request_type, request_path = msg_header_lines[0].split(' ',1) print "Request Type: %s" % request_type print "Request Path: %s" % request_path if request_type == "PUT": fullpath = './static/data/' + request_path response_code = FileSync.put_file(fullpath, 'text/plain', msg_body) client_sock.send('HTTP/1.1 ' + response_code + '\n') files_received += 1 elif request_type == "GET": fullpath = './static/data/' + request_path if os.path.isdir(fullpath): file_list = FileSync.get_file_list(fullpath) response_body = '' for file_name in file_list: response_body += file_name + '\n' client_sock.send('HTTP/1.1 ' + '200 OK' + '\n\n') client_sock.send(response_body + '\n') else: response_body = FileSync.get_file(fullpath) if response_body != '':
def get_team_scouting_data_summary_json(global_config, comp, name, attr_filter=None, filter_name=None, store_json_file=False):
    """Build a JSON summary of one team's scouting data for a competition.

    comp            -- competition string (maps to a season database)
    name            -- team number/name string
    attr_filter     -- optional list of attribute names; when non-empty, only
                       those attributes are included
    store_json_file -- when True, also persist the JSON via FileSync

    Returns the JSON string, or None when no attribute definitions are
    configured.
    """
    global_config['logger'].debug( 'GET Team %s Scouting Data For Competition %s', name, comp )

    # BUGFIX: mutable default argument ([]) replaced with a None sentinel
    if attr_filter is None:
        attr_filter = []

    # BUGFIX: bail out BEFORE opening the DB session; the original opened the
    # session first and leaked it on this early return.
    if global_config['attr_definitions'] is None:
        return None

    season = WebCommonUtils.map_comp_to_season(comp)
    session = DbSession.open_db_session(global_config['db_name'] + season)

    attrdef_filename = WebCommonUtils.get_attrdef_filename(comp=comp)
    attr_definitions = AttributeDefinitions.AttrDefinitions(global_config)
    attr_definitions.parse(attrdef_filename)

    result = []
    result.append('{ "competition" : "%s", "team" : "%s",\n' % (comp,name))
    result.append(' "scouting_data_summary" : [\n')

    team_attributes = DataModel.getTeamAttributesInOrder(session, name, comp)
    if len(team_attributes) > 0:
        some_attr_added = False
        for attribute in team_attributes:
            attr_def = attr_definitions.get_definition( attribute.attr_name )
            include_attr = False
            if attr_def:
                # an attribute is shown if any of these definition flags ask
                # for it (py2 has_key() replaced by the equivalent .get())
                if attr_def.get('Include_In_Team_Display') == 'Yes':
                    include_attr = True
                elif attr_def.get('Include_In_Report') == 'Yes':
                    include_attr = True
                elif attr_def.get('Weight', '0') != '0':
                    include_attr = True

                # if an attribute filter has been provided, only include the
                # attribute data if the attribute is in the filter
                if len(attr_filter) > 0:
                    if attr_def['Name'] not in attr_filter:
                        include_attr = False

                if include_attr == True:
                    some_attr_added = True
                    if 'Display_Name' in attr_def:
                        attr_name = attr_def['Display_Name']
                    else:
                        attr_name = attr_def['Name']
                    category = attr_def.get('Sub_Category', '')
                    result.append(' { "name": "%s", "matches": "%s", "cumulative_value": "%s", "average_value": "%s", "all_values": "%s", "category": "%s" }' % \
                                  (attr_name,str(attribute.num_occurs),str(attribute.cumulative_value),str(round(attribute.avg_value,1)),\
                                   DataModel.mapAllValuesToShortenedString(attr_def, attribute.all_values), category) )
                    result.append(',\n')
        if some_attr_added:
            # drop the trailing comma after the last entry
            result = result[:-1]

    # close the array and object (matches the sibling team JSON builders)
    result.append(' ] }\n')
    json_str = ''.join(result)

    if store_json_file is True:
        FileSync.put( global_config, '%s/EventData/TeamData/team%s_scouting_data_summary.json' % (comp,name), 'text', json_str)
    session.remove()
    return json_str