def get_team_comp_list(this_comp, team):
    """Return the list of competitions attended by the given team.

    The returned list always starts with this_comp (or the currently
    configured competition when this_comp is None).  The remaining
    events are fetched from TBA; if TBA has no data, they are
    reconstructed from the locally stored score records instead.
    """
    my_config = ScoutingAppMainWebServer.global_config
    complist = []
    if this_comp is None:  # fixed: use identity comparison for None
        this_comp = my_config["this_competition"] + my_config["this_season"]
        season = my_config["this_season"]
    else:
        season = map_comp_to_season(this_comp)
    complist.append(this_comp)

    team_complist = WebTeamData.get_team_event_list_from_tba(my_config, team, season)
    if not team_complist:
        # TBA had nothing for this team; fall back to competitions recorded
        # in the local scoring database.
        # NOTE(review): this session is never closed here -- confirm whether
        # DbSession manages its own lifetime.
        session = DbSession.open_db_session(my_config["db_name"] + my_config["this_season"])
        team_scores = DataModel.getTeamScore(session, team)
        for score in team_scores:
            comp = score.competition.upper()
            # currently, the competition season is stored in the database
            # as part of the competition. So, we need to add it for the comparison,
            # but not as we define the complist itself
            if comp != this_comp.upper():
                complist.append(comp)
    else:
        for comp in team_complist:
            if comp.upper() != this_comp.upper():
                complist.append(comp)
    return complist
def GET(self, param_str):
    """Serve the team rankings JSON for the competition named in the URL.

    Falls back to the currently configured competition when the URL does
    not carry one.
    """
    WebLogin.check_access(global_config, 10)
    pieces = param_str.split('/')
    if len(pieces) < 2 or pieces[1] == '':
        comp = global_config['this_competition'] + global_config['this_season']
    else:
        comp = pieces[1]
    return WebTeamData.get_team_rankings_json(global_config, comp)
def GET(self, param_str):
    """Serve the team list JSON for the requested competition.

    Falls back to the currently configured competition when no
    competition is supplied in the URL.
    """
    WebLogin.check_access(global_config, 10)
    params = param_str.split('/')
    # str.split always returns at least one element, so the original
    # 'numparams == 0' default branch was unreachable and an empty
    # param_str produced an empty competition string.  Treat a missing or
    # empty first component as "use the configured competition".
    if not params or params[0] == '':
        comp = global_config['this_competition'] + global_config['this_season']
    else:
        comp = params[0].lower()
    return WebTeamData.get_team_list_json(global_config, comp)
def GET(self, param_str):
    """Serve the scouting media-file listing JSON for a team at a competition.

    Expects the URL to carry at least '<comp>/<team>'; returns None otherwise.
    """
    WebLogin.check_access(global_config, 10)
    pieces = param_str.split('/')
    if len(pieces) < 2:
        return None
    comp, name = pieces[0], pieces[1]
    return WebTeamData.get_team_scouting_mediafiles_json(global_config, comp, name)
def GET(self, param_str):
    """Serve the score JSON for a team.

    With a single URL component the component is the team and the
    currently configured competition is assumed; with two or more the
    first is the competition and the second the team.  Returns None when
    no team can be determined.
    """
    WebLogin.check_access(global_config, 10)
    pieces = param_str.split('/')
    count = len(pieces)
    if count >= 2:
        comp, name = pieces[0], pieces[1]
    elif count == 1:
        comp = global_config['this_competition'] + global_config['this_season']
        name = pieces[0]
    else:
        return None
    return WebTeamData.get_team_score_json(global_config, name, comp)
def GET(self, param_str):
    """Serve the scouting notes JSON for a team at a competition.

    Guests never receive notes; returns None for them or when the URL
    does not carry both a competition and a team.
    """
    user_info = WebLogin.check_access(global_config, 10)
    pieces = param_str.split('/')
    result = None
    if len(pieces) >= 2:
        # do not return notes for guest accounts, just in case the notes
        # aren't very GP
        if user_info[1] < 10:
            result = WebTeamData.get_team_scouting_notes_json(global_config, pieces[0], pieces[1])
    return result
def get_event_matchschedule_json(global_config, year, event_code, team_str = None):
    """Build the match-schedule JSON for an event, optionally for one team.

    Matches are pulled from The Blue Alliance and grouped by round
    (qualification / quarter finals / semi finals / finals).  A
    successful fetch is cached through FileSync; when TBA returns
    nothing, a previously cached copy is served instead (which may be
    the empty string).
    """
    global_config['logger'].debug( 'GET Event Schedule Json' )

    # derive our competition name from the FIRST event code
    competition = WebCommonUtils.map_event_code_to_comp(year + event_code)

    store_data_to_file = False
    if team_str is None:
        exp_filename = ''
        event_matches = get_event_data_from_tba( '%s%s/matches' % (year, event_code.lower()) )
    else:
        exp_filename = '_%s' % team_str
        event_matches = WebTeamData.get_team_data_from_tba( team_str, 'event/%s%s/matches' % (year, event_code.lower()) )

    match_schedule = dict()
    match_schedule['event'] = event_code.lower()
    match_schedule['columns'] = [ 'Round', 'Match', 'Red_1', 'Red_2', 'Red_3', 'Blue_1', 'Blue_2', 'Blue_3' ]
    match_schedule['qualification'] = []
    match_schedule['quarter_finals'] = []
    match_schedule['semi_finals'] = []
    match_schedule['finals'] = []

    if event_matches:
        # the entire match set is returned from TBA; bucket each match into
        # the list for its competition round (replaces the original
        # if/elif chain with a dispatch table)
        round_lists = { 'qm': match_schedule['qualification'],
                        'qf': match_schedule['quarter_finals'],
                        'sf': match_schedule['semi_finals'],
                        'f':  match_schedule['finals'] }
        for match in event_matches:
            comp_level = match['comp_level']
            # elimination matches are identified by set number and match number
            if comp_level in ('qf', 'sf'):
                match_str = '%s-%s' % (match['set_number'], match['match_number'])
            else:
                match_str = str(match['match_number'])
            # TBA team keys look like 'frc1234'; lstrip('frc') removes the
            # prefix (team numbers are digits only, so over-stripping
            # cannot occur).  Assumes three teams per alliance.
            match_entry = [ comp_level,
                            match_str,
                            match['alliances']['red']['team_keys'][0].lstrip('frc'),
                            match['alliances']['red']['team_keys'][1].lstrip('frc'),
                            match['alliances']['red']['team_keys'][2].lstrip('frc'),
                            match['alliances']['blue']['team_keys'][0].lstrip('frc'),
                            match['alliances']['blue']['team_keys'][1].lstrip('frc'),
                            match['alliances']['blue']['team_keys'][2].lstrip('frc') ]
            if comp_level in round_lists:
                round_lists[comp_level].append(match_entry)
            store_data_to_file = True
        # the qualification match schedule needs to be sorted, the sort will
        # be done by the second element of each row, which is the match number
        match_schedule['qualification'].sort(key=lambda match_list: int(match_list[1]))
    else:
        # we were not able to retrieve the data from FIRST, so let's return
        # any stored file with the information, otherwise we will return an
        # empty json payload
        stored_file_data = FileSync.get( global_config, '%s/EventData/matchschedule%s.json' % (competition, exp_filename) )
        if stored_file_data != '':
            return stored_file_data

    json_str = json.dumps(match_schedule)
    if store_data_to_file:
        # removed the original no-op 'try: ... except: raise' wrapper
        FileSync.put( global_config, '%s/EventData/matchschedule%s.json' % (competition, exp_filename), 'text', json_str)
    return json_str
def get_event_matchresults_json(global_config, year, event_code, round_str, team_str = None):
    """Build a JSON string of match results for one round of an event.

    round_str selects the round: 'qual', 'quarters', 'semis' or 'finals'.
    When team_str is given, only that team's matches are fetched from TBA
    and the cached filename gets a per-team suffix.  On a TBA failure a
    previously cached file is returned instead (which may be '').
    """
    global_config['logger'].debug( 'GET Event Results Json' )
    # map the round name onto the TBA 'comp_level' code
    if round_str == 'qual':
        match_selector = 'qm'
    elif round_str == 'quarters':
        match_selector = 'qf'
    elif round_str == 'semis':
        match_selector = 'sf'
    elif round_str == 'finals':
        match_selector = 'f'
    # NOTE(review): any other round_str leaves match_selector unbound and
    # triggers a NameError in the match loop below -- consider a default.

    # derive our competition name from the FIRST event code
    competition = WebCommonUtils.map_event_code_to_comp(year+event_code)
    store_data_to_file = False
    # the JSON payload is assembled by hand as a list of string fragments
    result = []
    result.append('{ "event" : "%s",\n' % (event_code.lower()))

    if team_str is None:
        exp_filename = ''
        event_matches = get_event_data_from_tba( '%s%s/matches' % (year,event_code.lower()), api_version='v3' )
    else:
        exp_filename = '_%s' % team_str
        event_matches = WebTeamData.get_team_data_from_tba( team_str, 'event/%s%s/matches' % (year,event_code.lower()) )

    if len(event_matches):
        # matches is now a list of lists, with the first element of the list
        # being the list of column headings; take the list of columns and
        # apply to each of the subsequent rows to build the json response
        result.append(' "last_updated": "%s",\n' % time.strftime('%c'))
        headings = [ 'Match', 'Start Time', 'Red 1', 'Red 2', 'Red 3', 'Blue 1', 'Blue 2', 'Blue 3', 'Red Score', 'Blue Score' ]
        result.append(' "columns" : [\n')
        for heading in headings:
            result.append(' { "sTitle": "%s" }' % heading)
            result.append(',\n')
        if len(headings)>0:
            # drop the trailing ',\n' fragment emitted after the last column
            result = result[:-1]
        result.append(' ],\n')
        result.append(' "matchresults" : [\n')
        # the entire match set is returned from TBA, filter out the matches
        # for the desired round
        for match in event_matches:
            if str(match['comp_level']) == match_selector:
                result.append(' [ ')
                # Match number.  The bare 'raise' in the else arm deliberately
                # jumps into the except handler so the hyperlink form is used
                # whenever 'json_no_links' is missing or not 'Yes'.
                try:
                    if global_config['json_no_links'] == 'Yes':
                        result.append( '"%s", ' % str(match['match_number']) )
                    else:
                        raise
                except:
                    result.append( '"%s", ' % get_match_hyperlink(competition, match) )
                # Match start time (TBA supplies an epoch timestamp)
                match_epoch_time = int(match['time'])
                time_format_str = '%a %b %d - %I:%M %p'
                match_time_str = datetime.datetime.fromtimestamp(match_epoch_time).strftime(time_format_str)
                result.append( '"%s", ' % match_time_str )
                # same raise-into-except trick as above for the team cells
                try:
                    if global_config['json_no_links'] == 'Yes':
                        # Red alliance teams
                        result.append( '"%s", ' % str(match['alliances']['red']['team_keys'][0]).lstrip('frc') )
                        result.append( '"%s", ' % str(match['alliances']['red']['team_keys'][1]).lstrip('frc') )
                        result.append( '"%s", ' % str(match['alliances']['red']['team_keys'][2]).lstrip('frc') )
                        # Blue alliance teams
                        result.append( '"%s", ' % str(match['alliances']['blue']['team_keys'][0]).lstrip('frc') )
                        result.append( '"%s", ' % str(match['alliances']['blue']['team_keys'][1]).lstrip('frc') )
                        result.append( '"%s", ' % str(match['alliances']['blue']['team_keys'][2]).lstrip('frc') )
                    else:
                        raise
                except:
                    # Red alliance teams
                    result.append( '"%s", ' % get_team_hyperlink( competition, str(match['alliances']['red']['team_keys'][0]).lstrip('frc') ) )
                    result.append( '"%s", ' % get_team_hyperlink( competition, str(match['alliances']['red']['team_keys'][1]).lstrip('frc') ) )
                    result.append( '"%s", ' % get_team_hyperlink( competition, str(match['alliances']['red']['team_keys'][2]).lstrip('frc') ) )
                    # Blue alliance teams
                    result.append( '"%s", ' % get_team_hyperlink( competition, str(match['alliances']['blue']['team_keys'][0]).lstrip('frc') ) )
                    result.append( '"%s", ' % get_team_hyperlink( competition, str(match['alliances']['blue']['team_keys'][1]).lstrip('frc') ) )
                    result.append( '"%s", ' % get_team_hyperlink( competition, str(match['alliances']['blue']['team_keys'][2]).lstrip('frc') ) )
                # Red alliance score; TBA reports -1 for unplayed matches
                score = str(match['alliances']['red']['score'])
                if score == '-1':
                    score = '-'
                result.append( '"%s", ' % score )
                # Blue alliance score
                score = str(match['alliances']['blue']['score'])
                if score == '-1':
                    score = '-'
                result.append( '"%s" ' % score )
                result.append(' ],\n')
                store_data_to_file = True
        if store_data_to_file is True:
            # swap the last row's ' ],\n' terminator for a comma-free close,
            # then close the matchresults array itself
            result = result[:-1]
            result.append(' ]\n')
            result.append(' ]\n')
    else:
        # we were not able to retrieve the data from FIRST, so let's return
        # any stored file with the information, otherwise we will return an
        # empty json payload
        stored_file_data = FileSync.get( global_config, '%s/EventData/matchresults_%s%s.json' % (competition,round_str,exp_filename) )
        if stored_file_data != '':
            return stored_file_data
    result.append(' }\n')
    json_str = ''.join(result)
    if store_data_to_file:
        try:
            FileSync.put( global_config, '%s/EventData/matchresults_%s%s.json' % (competition,round_str,exp_filename), 'text', json_str)
        except:
            raise
    return json_str
def GET(self, filename):
    """Serve a stored team data file as JSON."""
    WebLogin.check_access(global_config, 10)
    return WebTeamData.get_team_datafile_json(global_config, filename)
def GET(self):
    """Render the team attributes page."""
    WebLogin.check_access(global_config, 10)
    page = WebTeamData.get_team_attributes_page(global_config)
    return page
def GET(self):
    """Return the team rankings as an array."""
    WebLogin.check_access(global_config, 10)
    rankings = WebTeamData.get_team_rankings_array(global_config)
    return rankings
def GET(self, comp, name):
    """Render the attribute rankings page for one competition and attribute."""
    WebLogin.check_access(global_config, 10)
    page = WebTeamData.get_team_attr_rankings_page(global_config, comp, name)
    return page
def GET(self, name):
    """Render the notes page for the named team."""
    WebLogin.check_access(global_config, 10)
    page = WebTeamData.get_team_notes_page(global_config, name)
    return page
def GET(self, name):
    """Serve the team info JSON for the named team.

    Access is still checked, but the result is returned regardless of
    the caller's access level (unlike the scouting-notes handler).
    """
    # the return value of check_access (and the access level inside it)
    # was assigned but never consulted here; drop the unused locals
    WebLogin.check_access(global_config, 10)
    return WebTeamData.get_team_info_json(global_config, name)
def processClientConnection( self, client_sock, client_info ):
    """Serve one file-sync client connection until it disconnects.

    Handles a sequence of hand-rolled HTTP requests on client_sock:
    PUT stores a file, POST triggers event/team data regeneration, and
    GET serves a file or a directory listing.  The socket is always
    closed on exit; IOError from the socket ends the session silently.
    """
    print "Accepted connection from ", client_info
    files_received = 0
    try:
        while True:
            # read_request is expected to return (header, body, content-type);
            # an empty header signals that the peer closed the connection
            msg_header, msg_body, content_type = self.read_request( client_sock )
            if len(msg_header) == 0:
                break
            print "Message Header: %s" % msg_header
            print "Message Body Length: %d" % len(msg_body)
            msg_header_lines = msg_header.splitlines()
            # request line is '<METHOD> <PATH...>'
            request_type, request_path = msg_header_lines[0].split(' ',1)
            print "Request Type: %s" % request_type
            print "Request Path: %s" % request_path
            request_complete = False
            # retrieve any params attached to the requested entity
            # NOTE(review): request_params is only bound when the path
            # contains '?'; a GET without params hits a NameError below.
            params_offset = request_path.find('?')
            if params_offset != -1:
                request_params = request_path[params_offset:]
                request_params = request_params.lstrip('?')
                request_path = request_path[0:params_offset]
            request_path = request_path.lstrip('/')
            # if the requested path starts with 'static', then let's assume that
            # the request knows the full path that it's looking for, otherwise,
            # we will prepend the path with the path to the data directory
            if request_path.startswith('static'):
                fullpath = './' + request_path
            else:
                fullpath = './static/data/' + request_path
            if request_type == "PUT":
                # make sure that the destination directory exists
                if not os.path.exists(os.path.dirname(fullpath)):
                    os.makedirs(os.path.dirname(fullpath))
                response_code = FileSync.put_file(fullpath, content_type, msg_body)
                client_sock.send('HTTP/1.1 ' + response_code + '\r\n')
                files_received += 1
            elif request_type == "POST":
                response_code = "400 Bad Request"
                path_elems = request_path.split('/')
                if len(path_elems) >= 2:
                    comp_season_list = WebCommonUtils.split_comp_str(path_elems[0])
                    if comp_season_list != None:
                        result = False
                        error = False
                        # for the sync of event and team data, the URI path is of the
                        # following format /Sync/<comp>/EventData/[TeamData/].
                        # if just EventData is provided, then the event data is
                        # regenerated; if TeamData is provided, then both the event
                        # data and team data is regenerated
                        if len(path_elems) >= 2 and path_elems[1] == 'EventData':
                            result = WebEventData.update_event_data_files( self.global_config, comp_season_list[1], comp_season_list[0], path_elems[1] )
                            if result == True:
                                result = WebTeamData.update_team_event_files( self.global_config, comp_season_list[1], comp_season_list[0], path_elems[1] )
                            if result == True:
                                result = WebAttributeDefinitions.update_event_data_files( self.global_config, path_elems[1] )
                            if result == False:
                                error = True
                        if len(path_elems) >= 3 and path_elems[2] == 'TeamData' and error is False:
                            # an optional fourth element names a single team;
                            # missing or empty means "all teams"
                            try:
                                team = path_elems[3]
                                if team == '':
                                    team = None
                            except:
                                team = None
                            result = WebTeamData.update_team_data_files( self.global_config, comp_season_list[1], comp_season_list[0], path_elems[2], team )
                        if result == True:
                            response_code = "200 OK"
                client_sock.send('HTTP/1.1 ' + response_code + '\r\n')
            elif request_type == "GET":
                # Parse any params attached to this GET request
                params = request_params.split(';')
                for param in params:
                    # split the parameter into the tag and value
                    # NOTE(review): a param without '=' raises IndexError on
                    # parsed_param[1]; tag/value are currently unused.
                    parsed_param = param.split('=')
                    tag = parsed_param[0]
                    value = parsed_param[1]
                    # process the parameter
                # check to see if the requested path exists. We may need to handle that
                # condition separately, treating non-existent directories as empty (as
                # opposed to sending a 404 not found.
                # TODO: update the client side to handle the 404 not found as an empty directory
                # and then update this block to send the 404 in all cases.
                if not os.path.exists(fullpath):
                    if request_path[-1] == '/':
                        # if the requested path refers to a directory, let's return an empty
                        # response indicating that there are no files in that directory
                        client_sock.send('HTTP/1.1 ' + '200 OK' + '\r\n')
                        client_sock.send('Content-Length: 0\r\n')
                        client_sock.send('\r\n\r\n')
                    else:
                        client_sock.send('HTTP/1.1 ' + '404 Not Found' + '\r\n\r\n')
                    request_complete = True
                if not request_complete:
                    if os.path.isdir(fullpath):
                        # directory request: respond with a newline-separated
                        # listing of the files it contains
                        file_list = FileSync.get_file_list(fullpath)
                        response_body = ''
                        for file_name in file_list:
                            response_body += file_name + '\n'
                        client_sock.send('HTTP/1.1 ' + '200 OK' + '\r\n')
                        client_sock.send('Content-Length: %d\r\n' % len(response_body))
                        client_sock.send('\r\n')
                        client_sock.send(response_body + '\r\n')
                    else:
                        response_body = FileSync.get_file(fullpath)
                        if response_body != '':
                            client_sock.send('HTTP/1.1 ' + '200 OK' + '\r\n')
                            client_sock.send('Content-Length: %d\r\n' % len(response_body))
                            client_sock.send('\r\n')
                            client_sock.send(response_body + '\r\n')
                        else:
                            # empty file content is treated as "not found"
                            client_sock.send('HTTP/1.1 ' + '404 Not Found' + '\r\n\r\n')
            print "Request Complete\n"
    except IOError:
        # peer dropped the connection mid-request; treat as normal disconnect
        pass
    print "disconnected"
    client_sock.close()