def process_debrief_files(global_config, input_dir, recursive=True):

    # Initialize the database session connections
    issues_db_name = global_config['issues_db_name'] + global_config['this_season']
    debrief_db_name = global_config['debriefs_db_name'] + global_config['this_season']

    debrief_session = DbSession.open_db_session(debrief_db_name)
    issues_session = DbSession.open_db_session(issues_db_name)

    # Create the database if it doesn't already exist
    #if not os.path.exists('./' + db_name):
    #    DebriefDataModel.create_db_tables(my_db)

    # The following regular expression will match all files that conform to the
    # Debrief*.txt naming format. Build a list of all data files within the
    # directory passed in via the command line arguments that match it.
    file_regex = re.compile(r'Debrief[a-zA-Z0-9_-]+\.txt')
    files = get_files(global_config, debrief_session, debrief_db_name,
                      input_dir, file_regex, recursive)

    # Process data files
    for data_filename in files:
        print 'processing %s' % data_filename
        try:
            # Initialize the debrief_attributes dictionary in preparation for
            # parsing the data file
            debrief_attributes = {}

            # Parse the data file, storing all the information in the attributes
            # dictionary
            FileParser.FileParser(data_filename).parse(debrief_attributes)
            DebriefDataModel.addDebriefFromAttributes(debrief_session, debrief_attributes)

            # Also extract the competition name if it has been included in the data file
            if debrief_attributes.has_key('Competition'):
                competition = debrief_attributes['Competition']
                issue_base_name = WebCommonUtils.split_comp_str(competition)[0]
            else:
                competition = global_config['this_competition'] + global_config['this_season']
                issue_base_name = global_config['this_competition']

            if competition is None:
                raise Exception('Competition Not Specified!')

            # At competition, we will likely have multiple laptops managing the data,
            # but we want only one machine to be responsible for the issues database.
            # In all likelihood, that machine will be the one in the pits, or possibly
            # the application running in the cloud.
            if global_config['issues_db_master'] == 'Yes':
                match_id = debrief_attributes['Match']
                submitter = debrief_attributes['Scouter']
                timestamp = str(int(time.time()))
                subgroup = 'Unassigned'
                status = 'Open'
                owner = 'Unassigned'

                # A debrief file may contain up to three issues, stored under the
                # attribute prefixes Issue1, Issue2, and Issue3. Handle each one
                # the same way.
                for issue_index in range(1, 4):
                    issue_key = 'Issue%d' % issue_index
                    if debrief_attributes.has_key(issue_key + '_Summary') \
                       or debrief_attributes.has_key(issue_key + '_Description'):
                        # Look to see if there is already a debrief issue, and if so, do not
                        # attempt to create/update an issue, as there are already other issue
                        # files that would then conflict with this one
                        if DebriefDataModel.getDebriefIssue(debrief_session, competition,
                                                            match_id, issue_key) is None:
                            # If no summary is provided, then use the description as the summary.
                            # Likewise, if no description is provided, then use the summary as the
                            # description. Keep in mind that we need at least the summary or the
                            # description to be provided.
                            if debrief_attributes.has_key(issue_key + '_Summary'):
                                summary = debrief_attributes[issue_key + '_Summary']
                            else:
                                summary = debrief_attributes[issue_key + '_Description']
                            if debrief_attributes.has_key(issue_key + '_Description'):
                                description = debrief_attributes[issue_key + '_Description']
                            else:
                                description = debrief_attributes[issue_key + '_Summary']
                            if debrief_attributes.has_key(issue_key + '_Priority'):
                                priority = debrief_attributes[issue_key + '_Priority']
                            else:
                                priority = 'Priority_3'
                            if debrief_attributes.has_key(issue_key + '_Taskgroup'):
                                component = debrief_attributes[issue_key + '_Taskgroup']
                            else:
                                component = ''

                            debrief_key = str(match_id) + '_' + issue_key
                            issue_id = IssueTrackerDataModel.getIssueId(issues_session, issue_base_name)
                            issue = IssueTrackerDataModel.addOrUpdateIssue(
                                issues_session, issue_id, summary, status, priority, subgroup,
                                component, submitter, owner, description, timestamp, debrief_key)
                            if issue is not None:
                                issue.create_file('./static/data/%s/ScoutingData' % competition)
                            DebriefDataModel.addOrUpdateDebriefIssue(debrief_session, match_id,
                                                                     competition, issue_id, issue_key)
        except Exception, e:
            # Log the exception but continue processing other files
            log_exception(global_config['logger'], e)

        # Add the file to the set of processed files so that we don't process it again.
        # Do it outside the try/except block so that we don't try to process a bogus
        # file over and over again.
        DebriefDataModel.addProcessedFile(debrief_session, data_filename)
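
# Illustrative usage sketch (not part of the original module): process_debrief_files()
# expects a global_config dictionary carrying the database names, the current season
# and competition identifiers, the 'issues_db_master' flag, and a logger. The keys
# below are the ones the function reads; the concrete values and the input directory
# are hypothetical placeholders.
#
#   global_config = {
#       'issues_db_name'   : 'issues',
#       'debriefs_db_name' : 'debriefs',
#       'this_season'      : '2014',            # hypothetical season string
#       'this_competition' : 'TestComp',        # hypothetical competition string
#       'issues_db_master' : 'Yes',
#       'logger'           : logging.getLogger('debriefs'),
#   }
#   process_debrief_files(global_config, './data/debriefs', recursive=True)
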
def processClientConnection(self, client_sock, client_info):
    print "Accepted connection from ", client_info
    files_received = 0
    try:
        while True:
            msg_header, msg_body, content_type = self.read_request(client_sock)
            if len(msg_header) == 0:
                break

            print "Message Header: %s" % msg_header
            print "Message Body Length: %d" % len(msg_body)

            msg_header_lines = msg_header.splitlines()
            request_type, request_path = msg_header_lines[0].split(' ', 1)
            print "Request Type: %s" % request_type
            print "Request Path: %s" % request_path

            request_complete = False

            # Retrieve any params attached to the requested entity
            request_params = ''
            params_offset = request_path.find('?')
            if params_offset != -1:
                request_params = request_path[params_offset:]
                request_params = request_params.lstrip('?')
                request_path = request_path[0:params_offset]
            request_path = request_path.lstrip('/')

            # If the requested path starts with 'static', then let's assume that the
            # request knows the full path that it's looking for; otherwise, we will
            # prepend the path with the path to the data directory
            if request_path.startswith('static'):
                fullpath = './' + request_path
            else:
                fullpath = './static/data/' + request_path

            if request_type == "PUT":
                # Make sure that the destination directory exists
                if not os.path.exists(os.path.dirname(fullpath)):
                    os.makedirs(os.path.dirname(fullpath))

                response_code = FileSync.put_file(fullpath, content_type, msg_body)
                client_sock.send('HTTP/1.1 ' + response_code + '\r\n')
                files_received += 1

            elif request_type == "POST":
                response_code = "400 Bad Request"
                path_elems = request_path.split('/')
                if len(path_elems) >= 2:
                    comp_season_list = WebCommonUtils.split_comp_str(path_elems[0])
                    if comp_season_list is not None:
                        result = False
                        error = False

                        # For the sync of event and team data, the URI path has the following
                        # format: /Sync/<comp>/EventData/[TeamData/]. If just EventData is
                        # provided, then the event data is regenerated; if TeamData is also
                        # provided, then both the event data and the team data are regenerated.
                        if len(path_elems) >= 2 and path_elems[1] == 'EventData':
                            result = WebEventData.update_event_data_files(
                                self.global_config, comp_season_list[1],
                                comp_season_list[0], path_elems[1])
                            if result == True:
                                result = WebTeamData.update_team_event_files(
                                    self.global_config, comp_season_list[1],
                                    comp_season_list[0], path_elems[1])
                            if result == True:
                                result = WebAttributeDefinitions.update_event_data_files(
                                    self.global_config, path_elems[1])
                            if result == False:
                                error = True

                        if len(path_elems) >= 3 and path_elems[2] == 'TeamData' and error is False:
                            try:
                                team = path_elems[3]
                                if team == '':
                                    team = None
                            except:
                                team = None
                            result = WebTeamData.update_team_data_files(
                                self.global_config, comp_season_list[1],
                                comp_season_list[0], path_elems[2], team)

                        if result == True:
                            response_code = "200 OK"

                client_sock.send('HTTP/1.1 ' + response_code + '\r\n')

            elif request_type == "GET":
                # Parse any params attached to this GET request
                if request_params:
                    for param in request_params.split(';'):
                        # Split the parameter into the tag and value
                        parsed_param = param.split('=')
                        tag = parsed_param[0]
                        if len(parsed_param) > 1:
                            value = parsed_param[1]
                        else:
                            value = ''
                        # process the parameter

                # Check to see if the requested path exists. We may need to handle that
                # condition separately, treating non-existent directories as empty (as
                # opposed to sending a 404 Not Found).
                # TODO: update the client side to handle the 404 Not Found as an empty
                # directory and then update this block to send the 404 in all cases.
                if not os.path.exists(fullpath):
                    if request_path.endswith('/'):
                        # If the requested path refers to a directory, return an empty
                        # response indicating that there are no files in that directory
                        client_sock.send('HTTP/1.1 ' + '200 OK' + '\r\n')
                        client_sock.send('Content-Length: 0\r\n')
                        client_sock.send('\r\n\r\n')
                    else:
                        client_sock.send('HTTP/1.1 ' + '404 Not Found' + '\r\n\r\n')
                    request_complete = True

                if not request_complete:
                    if os.path.isdir(fullpath):
                        file_list = FileSync.get_file_list(fullpath)
                        response_body = ''
                        for file_name in file_list:
                            response_body += file_name + '\n'
                        client_sock.send('HTTP/1.1 ' + '200 OK' + '\r\n')
                        client_sock.send('Content-Length: %d\r\n' % len(response_body))
                        client_sock.send('\r\n')
                        client_sock.send(response_body + '\r\n')
                    else:
                        response_body = FileSync.get_file(fullpath)
                        if response_body != '':
                            client_sock.send('HTTP/1.1 ' + '200 OK' + '\r\n')
                            client_sock.send('Content-Length: %d\r\n' % len(response_body))
                            client_sock.send('\r\n')
                            client_sock.send(response_body + '\r\n')
                        else:
                            client_sock.send('HTTP/1.1 ' + '404 Not Found' + '\r\n\r\n')

            print "Request Complete\n"

    except IOError:
        pass

    print "disconnected"
    client_sock.close()
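
# Illustrative sketch (assumed surrounding code, not taken from this module): the
# handler above is written to be driven by an accept loop that hands it a connected
# client socket plus the peer address returned by accept(). A minimal loop along
# those lines could look like the commented example below; the listen port and the
# 'server' instance name are hypothetical.
#
#   listen_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#   listen_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
#   listen_sock.bind(('', 8080))                 # hypothetical port
#   listen_sock.listen(5)
#   while True:
#       client_sock, client_info = listen_sock.accept()
#       server.processClientConnection(client_sock, client_info)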