def run_bro(fpcap, certs_dir):
    '''
    Run Bro (2.5) over a single pcap file and collect its parsed log output.

    Creates a unique temporary directory under TMP_DIR, runs Bro inside it,
    parses every non-empty *.log file produced with parse_log(), moves any
    extracted *.pem certificates into `certs_dir`, and finally removes the
    temporary directory.

    :param fpcap: path to the pcap file to analyze (resolved to absolute).
    :param certs_dir: directory where extracted certificates are moved.
    :return: dict mapping log file name -> list of per-record OrderedDicts.
    '''
    all_log_records = {}
    # Get the absolute path of the PCAP because we change working directory below.
    fpcap = os.path.abspath(fpcap)
    # Remember the current working directory so we can restore it at the end.
    orig_cwd = os.getcwd()
    # Create a unique outdir for this pcap: basename (no extension) + timestamp.
    outdir = '{}{}_{}'.format(TMP_DIR,
                              os.path.basename(fpcap).split('.')[0],
                              time.time())
    os.mkdir(outdir)
    # Move to the directory where we want Bro to store its logs.
    os.chdir(outdir)
    # Clean temporary file contents (just in case a previous run left residue).
    os.system("rm -f *.log 2> /dev/null")
    os.system("rm -f *.pem 2> /dev/null")
    # Run Bro (2.5) on the pcap file.
    # NOTE(review): shell=True with fpcap interpolated into the command string
    # is injection-prone if fpcap is untrusted — confirm inputs are controlled.
    cmd = "%s -C %s %s %s -r %s 2> /dev/null" %\
        (BRO_BIN, BRO_SCRIPT_25, BRO_HTTP_SCRIPT, BRO_SSL_STATS_SCRIPT, fpcap)
    bro = subp.Popen(cmd, shell=True)
    bro.wait()
    # Check and parse all logs found in the outdir.
    logs = glob.glob('*.log')
    for log in logs:
        # This is a log file used internally by our Bro script; ignore it.
        if log == 'dns_cache.log':
            continue
        # When Bro fails our custom logs are created but they are empty.
        if not os.path.getsize(log):
            continue
        # Get all records from this log as plain dicts.
        records = []
        for entry in parse_log(log):
            records.append(entry._asdict())
        # Store records for this log (skip logs that yielded nothing).
        if records:
            all_log_records[log] = records
    # Clean temporary file contents.
    os.system("rm -f *.log 2> /dev/null")
    # Store certificates in the certs directory.
    os.system("mv *.pem {}/ 2> /dev/null".format(certs_dir))
    # Go back to the original working directory.
    os.chdir(orig_cwd)
    # Remove the unique directory created for this run.
    os.system("rm -r {} 2> /dev/null".format(outdir))
    return all_log_records
def test_blackbox_parse_file(self):
    """Smoke test: every log file matched by self.log_dir must parse
    end-to-end without raising."""
    matched = glob.glob(self.log_dir)
    # More than one file is expected; a broken glob pattern fails loudly here.
    self.assertGreater(len(matched), 1)
    for path in matched:
        print(path)
        with self.subTest(i=path):
            with open(path, "r") as handle:
                # Drain the parser; we only care that no exception escapes.
                for _record in brologparse.parse_log(handle):
                    pass
def filter_bro_log(filename):
    """
    Parse a Bro log file and keep only the relevant fields of each record.

    :param filename: path of the Bro log file to parse.
    :return: list of dicts, one per log record, restricted to the fields
             returned by get_bro_fields(filename); None when no relevant
             fields are configured for this log.
    """
    fields = get_bro_fields(filename)
    if not fields:
        return None
    result = []
    # Bug fix: the original called parse_log(file), passing the `file`
    # builtin instead of the `filename` argument.
    for record in parse_log(filename):
        logline = record._asdict()  # convert namedtuple to dict
        # Keep only the relevant fields for this log type.
        result.append({f: logline[f] for f in fields})
    return result
def debug():
    """Loop through all *.log files in the current directory and print the
    first two parsed records of each, to show what data could be expected."""
    for log_file in glob("./*.log"):
        shown = 0
        try:
            print('showing', log_file)
            for record in parse_log(log_file):
                # record._fields: tuple of field names;
                # record._asdict(): OrderedDict of field name -> value.
                print(record)
                shown += 1
                if shown == 2:
                    break
        except Exception as err:
            # Best-effort debug helper: report and move on to the next file.
            print('ERROR content of file was not displayed:', err)
def debug():
    """Preview helper: for every *.log file in the current directory, parse
    it and print up to two records so the available data can be inspected."""
    for path in glob("./*.log"):
        count = 0
        try:
            print('showing', path)
            for row in parse_log(path):
                # Each row is a namedtuple; row._fields names the columns and
                # row._asdict() maps them to their values.
                print(row)
                count += 1
                if count == 2:
                    break
        except Exception as exc:
            # Swallow per-file failures so the remaining logs still print.
            print('ERROR content of file was not displayed:', exc)
def main(args):
    """
    Parse all Bro *.log files in args.dir and stream them into one JSON file.

    Output format written to args.ofile (relative to the starting cwd):
        ["<dir>", "NOMD5", {"<log name>": [<record dict>, ...], ...}]
    Also copies any *.pem certificates found in args.dir into args.dcerts.

    :param args: parsed CLI arguments; uses args.dir, args.dcerts,
                 args.ofile and args.optimize.
    """
    cwd = os.getcwd()
    # Make sure the certificates output directory exists.
    if not os.path.isdir(args.dcerts):
        os.mkdir(args.dcerts)
    certs_dir = os.path.abspath(args.dcerts)
    # Move to the logs dir.
    os.chdir(args.dir)
    # Prepare the list of log files to be processed.
    logs = glob.glob('*.log')
    if not logs:
        sys.stderr.write('[-] Missed bro logs.\n')
        exit(1)
    # Total operations to be done (+1 for the certificate-copy step).
    total_logs_cnt = len(logs) + 1
    # NOTE(review): open(..., 'w', 0) is the Python 2 "unbuffered" idiom;
    # on Python 3 unbuffered text I/O raises ValueError — confirm the
    # target interpreter before porting.
    with open(os.path.join(cwd, args.ofile), 'w', 0) as fw:
        first = True
        #TODO: Transform input dir name into hash
        fw.write('["{}", "NOMD5", '.format(args.dir))
        fw.write('{')
        for pos, log in enumerate(logs):
            pb.update_progress_bar(pos, total_logs_cnt)
            # This is a log file used internally by our Bro script; ignore it.
            if log == 'dns_cache.log':
                continue
            # With large files we will only parse these files.
            if args.optimize and log != 'ssl_dm.log' and log != 'ssl_certs.log':
                continue
            # When Bro fails our custom logs are created but they are empty.
            if not os.path.getsize(log):
                sys.stderr.write('[-] Log is empty: {}\n'.format(log))
                continue
            sys.stderr.write("[+] Parsing log file: {}\n".format(log))
            try:
                # Get all records from this log.
                records = parse_log(os.path.abspath(log))
                # Store records for this log in JSON format, comma-separating
                # entries by hand since we stream instead of building a dict.
                if records:
                    if not first:
                        fw.write(',')
                    fw.write('"' + log + '":[')
                    for n, entry in enumerate(records):
                        if n:
                            fw.write(',')
                        json.dump(entry._asdict(), fw)
                        fw.flush()
                    fw.write(']')
                    first = False
                else:
                    sys.stderr.write(
                        '[-] No records for log: {}\n'.format(log))
            except Exception as e:
                # Best-effort: report the failing log and keep going.
                sys.stderr.write('\t[!] Error: {}\n'.format(repr(e)))
        fw.flush()
        # Close the top-level JSON object and array.
        fw.write('}]\n')
        pb.update_progress_bar(pos, total_logs_cnt - 1)
        # Store certificates in the certs directory.
        os.system("cp *.pem {}/ 2> /dev/null".format(certs_dir))
        pb.update_progress_bar(pos, total_logs_cnt)
    # Return to the original working directory.
    os.chdir(cwd)