def add_interesting_events(self, node_dictionary, zipfile, zipfilename):
    t = time.clock()
    self.filename = zipfilename
    # Figure out sizes for progress reporting
    for key in self.events.keys():
        for name in zipfile.namelist():
            if name.endswith('/' + key):
                self.total_size += zipfile.getinfo(name).file_size
    mortimer.update_progress_so_far(self.progress_queue, self.total_size, self.progress_size)
    for key in self.events.keys():
        for name in zipfile.namelist():
            if name.endswith('/' + key):
                # Spool the archive member out to a temporary file so it can be scanned line by line
                tf = tempfile.TemporaryFile()
                tf.write(zipfile.open(name).read())
                tf.seek(0)
                # Some logs don't have a year in the timestamp; assume the year from the zip
                # entry's modification date (stored as epoch seconds for Jan 1 of that year)
                self.year = int((datetime(zipfile.getinfo(name).date_time[0], 1, 1) - datetime(1970, 1, 1)).total_seconds())
                self.process_log(node_dictionary, key, tf)
                tf.close()
    self.process_time = time.clock() - t
    mortimer.update_progress_so_far(self.progress_queue, self.total_size, self.total_size)
    print "{}: Processing of node events took {} seconds".format(self.filename, self.process_time)
def process_log(self, node_dictionary, log_name, file):
    ev = self.events[log_name]
    if ev:
        # Now scan for the presence of our event.
        lines = 0
        for line in file:
            lines += 1
            self.progress_size += len(line)
            for entry in ev:
                if entry['event'] in line:
                    self.add_to_stats(node_dictionary, log_name, entry, line)
            # Report progress every 300 lines
            if lines == 300:
                lines = 0
                mortimer.update_progress_so_far(self.progress_queue, self.total_size, self.progress_size)
    else:
        print "No event entries for {}\n".format(log_name)
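# Usage sketch (illustrative only, not part of the original module): these methods assume
# a processor object -- the class name "EventProcessor" below is hypothetical -- whose
# `events` dict maps log file names to lists of {'event': ...} entries, and which carries
# `progress_queue`, `total_size`, and `progress_size` attributes for progress reporting.
#
#     import zipfile
#
#     processor = EventProcessor(...)  # hypothetical constructor
#     processor.events = {'mongod.log': [{'event': 'Assertion'}]}
#     with zipfile.ZipFile('diagnostics.zip') as archive:
#         processor.add_interesting_events(node_dictionary, archive, 'diagnostics.zip')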