def main():
    """Print every unified2 record from the files named on the command line.

    Returns 1 (usage error) when no filenames are given.
    """
    args = sys.argv[1:]
    if len(args) == 0:
        print("usage: %s <file>..." % (sys.argv[0]))
        return 1
    for rec in unified2.FileRecordReader(*args):
        print_record(rec)
def main():
    """Parse unified2 filenames from the command line and print each record.

    Prints usage and returns when no filenames were supplied.
    """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("filename", nargs="*")
    parsed = arg_parser.parse_args()
    if len(parsed.filename) == 0:
        arg_parser.print_usage()
        return
    record_reader = unified2.FileRecordReader(*parsed.filename)
    for rec in record_reader:
        print_record(rec)
def extract_snort(self, path):
    """Load the unified2 records at *path* into a DataFrame and fold it
    into ``self.snort_table``.

    A string 'Time' column is derived from the packet-second and
    packet-microsecond fields. The first call seeds the table; later
    calls inner-merge into the existing one. Returns the merged table.
    """
    records = list(unified2.FileRecordReader(path))
    table = pd.DataFrame(records)

    # Fold second + microsecond into one fractional-seconds string.
    def _timestamp(row):
        return str(int(row['packet-second']) +
                   int(row['packet-microsecond']) / float(1000000))

    table['Time'] = table.apply(_timestamp, axis=1)

    if self.snort_table is None:
        self.snort_table = table
    else:
        self.snort_table = self.snort_table.merge(table, how='inner')
    return self.snort_table
def create_producer():
    """Read unified2 records from a spool file and publish paired
    event/packet data to the 'SNORT' Kafka topic.

    A record is paired with the previous record when both carry the same
    event-id; the pair is serialized via format_json() and each field
    value is sent as a UTF-8 encoded JSON message.

    Fixes: `count` was referenced in the except handler but never
    defined, raising NameError the first time UnknownRecordType fired.
    """
    producer = KafkaProducer(bootstrap_servers='mods-kafka-new:9092')

    # NOTE(review): spool path is hard-coded; the real-time reader
    # (read_realtime("/var/log/snort/")) is disabled for now.
    read_file = "32_unified2.log"
    print(read_file)
    reader = unified2.FileRecordReader(read_file)

    record_prev = None
    count = 0  # pairs successfully published; used by the error handler
    for index, record in enumerate(reader):
        print("###### RECORD %s " % record)
        try:
            if record_prev is not None and record.get(
                    "event-id") == record_prev.get("event-id"):
                # Convert the pair to JSON, dropping unneeded fields.
                record_json = dumps(format_json(record, record_prev))
                rjson = loads(record_json)
                for key, value in rjson.items():
                    print(value)
                    producer.send('SNORT', value=dumps(value).encode('utf-8'))
                count += 1
            else:
                print("The packet does not belong to the same event")
            record_prev = record
            # Safety stop for long runs.
            if index > 10000 and record_prev.get("event-id") == 0:
                break
            time.sleep(0.01)
        except unified2.UnknownRecordType as err:
            if count == 0:
                LOG.error("%s: Is this a unified2 file?" % (err))
            else:
                LOG.error(err)
def test_growing_file(self):
    """Records appended to a spool file become readable by an existing
    reader (simulates a growing unified2 log)."""
    spool_path = "%s/unified2.log.0001" % (self.tmpdir)
    with open(self.test_filename, "rb") as source:
        payload = source.read()

    with open(spool_path, "ab") as spool:
        spool.write(payload)

    reader = unified2.FileRecordReader(spool_path)

    # First batch: 17 records, then EOF.
    for _ in range(17):
        self.assertTrue(reader.next() is not None)
    self.assertTrue(reader.next() is None)

    # Grow the file; the same reader must pick up the new records.
    with open(spool_path, "ab") as spool:
        spool.write(payload)

    for _ in range(17):
        self.assertTrue(reader.next() is not None)
    self.assertTrue(reader.next() is None)
def main():
    """Entry point: read unified2 records from files or a spool directory
    and write them out as aggregated EVE-style JSON events.

    Event, Packet and ExtraData records belonging to the same alert are
    combined into one event object before being written.
    """
    msgmap = maps.SignatureMap()
    classmap = maps.ClassificationMap()

    parser = argparse.ArgumentParser(
        fromfile_prefix_chars='@', epilog=epilog)
    parser.add_argument(
        "-C", dest="classification_path", metavar="<classification.config>",
        help="path to classification config")
    parser.add_argument(
        "-S", dest="sidmsgmap_path", metavar="<msg-msg.map>",
        help="path to sid-msg.map")
    parser.add_argument(
        "-G", dest="genmsgmap_path", metavar="<gen-msg.map>",
        help="path to gen-msg.map")
    parser.add_argument(
        "--snort-conf", dest="snort_conf", metavar="<snort.conf>",
        help="attempt to load classifications and map files based on the "
        "location of the snort.conf")
    parser.add_argument(
        "--directory", metavar="<spool directory>",
        help="spool directory (eg: /var/log/snort)")
    parser.add_argument(
        "--prefix", metavar="<spool file prefix>",
        help="spool filename prefix (eg: unified2.log)")
    parser.add_argument(
        "--bookmark", action="store_true", default=False,
        help="enable bookmarking")
    parser.add_argument(
        "--follow", action="store_true", default=False,
        help="follow files/continuous mode (spool mode only)")
    parser.add_argument(
        "--delete", action="store_true", default=False,
        help="delete spool files")
    parser.add_argument(
        "-o", "--output", metavar="<filename>",
        help="output filename (eg: /var/log/snort/alerts.json")
    parser.add_argument(
        "--stdout", action="store_true", default=False,
        help="also log to stdout if --output is a file")
    parser.add_argument(
        "--packet-printable", action="store_true", default=False,
        help="add packet_printable field to events")
    parser.add_argument(
        "--packet-hex", action="store_true", default=False,
        help="add packet_hex field to events")
    parser.add_argument(
        "filenames", nargs="*")
    args = parser.parse_args()

    # Optionally derive classification/map files from a snort.conf.
    if args.snort_conf:
        load_from_snort_conf(args.snort_conf, classmap, msgmap)
    if args.classification_path:
        classmap.load_from_file(
            open(os.path.expanduser(args.classification_path)))
    if args.genmsgmap_path:
        msgmap.load_generator_map(open(os.path.expanduser(args.genmsgmap_path)))
    if args.sidmsgmap_path:
        msgmap.load_signature_map(open(os.path.expanduser(args.sidmsgmap_path)))

    if msgmap.size() == 0:
        LOG.warning("WARNING: No alert message map entries loaded.")
    else:
        LOG.info("Loaded %s rule message map entries.", msgmap.size())
    if classmap.size() == 0:
        LOG.warning("WARNING: No classifications loaded.")
    else:
        LOG.info("Loaded %s classifications.", classmap.size())

    eve_filter = EveFilter(
        msgmap, classmap, packet_printable=args.packet_printable,
        packet_hex=args.packet_hex)

    # Default to stdout unless an output file is given; --stdout
    # duplicates output to stdout in addition to the file.
    outputs = []
    if args.output:
        outputs.append(OutputWrapper(args.output))
        if args.stdout:
            outputs.append(OutputWrapper("-", sys.stdout))
    else:
        outputs.append(OutputWrapper("-", sys.stdout))

    writer = Writer(outputs, eve_filter)

    bookmark = None
    if args.directory and args.prefix:
        # Spool mode, optionally resuming from a saved bookmark.
        init_filename, init_offset = None, None
        if args.bookmark:
            bookmark = unified2.Unified2Bookmark(
                args.directory, args.prefix)
            init_filename, init_offset = bookmark.get()
        rollover_handler = RolloverHandler(args.delete)
        reader = unified2.SpoolRecordReader(
            directory=args.directory,
            prefix=args.prefix,
            follow=args.follow,
            init_filename=init_filename,
            init_offset=init_offset,
            rollover_hook=rollover_handler.on_rollover)
    elif args.filenames:
        if args.bookmark:
            LOG.error("Bookmarking not supported in file mode, exiting.")
            return 1
        reader = unified2.FileRecordReader(*args.filenames)
    else:
        print("nothing to do.")
        return

    # Aggregation loop: an Event record opens a pending event; following
    # Packet/ExtraData records are attached to it. The pending event is
    # flushed when a new Event arrives, after ~1s of idle input, or at
    # end of input in non-follow mode.
    event = None
    last_record_time = time.time()
    queue = []
    while True:
        flush = False
        record = reader.next()
        done = False
        if not record:
            # No record available: flush a stale pending event, keep
            # polling in follow mode, otherwise finish up.
            if event and time.time() - last_record_time > 1.0:
                queue.append(event)
                event = None
                flush = True
            else:
                if args.follow:
                    time.sleep(0.01)
                else:
                    if event:
                        queue.append(event)
                        flush = True
                    done = True
        else:
            last_record_time = time.time()
            if isinstance(record, unified2.Event):
                # A new event flushes any previously pending event.
                if event is not None:
                    queue.append(event)
                    flush = True
                event = record
            elif isinstance(record, unified2.ExtraData):
                # Extra data with no pending event is dropped.
                if not event:
                    continue
                event["extra-data"].append(record)
            elif isinstance(record, unified2.Packet):
                if not event:
                    # Orphan packet: emit it as a stand-alone record.
                    queue.append(record)
                    flush = True
                else:
                    if "packet" in event:
                        # Event already has its packet; emit the extra
                        # packet separately.
                        queue.append(record)
                    else:
                        event["packet"] = record
        if flush:
            for record in queue:
                writer.write(record)
            # Persist the read position after each flush.
            if args.bookmark and bookmark:
                location = reader.tell()
                bookmark.update(*location)
            queue = []
        if done:
            break
def main():
    """Read unified2 records and ship formatted alerts to a CyberSift
    Elasticsearch server.

    Input is either explicit files or a Snort spool directory
    (optionally followed and bookmarked). Returns 1 on invalid option
    combinations; exits when no CyberSift IP is provided.

    Fixes: deprecated Logger.warn() replaced with warning(); on
    Python 3 `str(check_output(...))` embedded the bytes repr
    (``b'...'``) in the bookmark filename — the output is now decoded.
    """
    msgmap = maps.SignatureMap()
    classmap = maps.ClassificationMap()

    parser = argparse.ArgumentParser(fromfile_prefix_chars='@',
                                     epilog=epilog)
    parser.add_argument("-C", dest="classification_path",
                        metavar="<classification.config>",
                        help="path to classification config")
    parser.add_argument("-S", dest="sidmsgmap_path", metavar="<msg-msg.map>",
                        help="path to sid-msg.map")
    parser.add_argument("-G", dest="genmsgmap_path", metavar="<gen-msg.map>",
                        help="path to gen-msg.map")
    parser.add_argument(
        "--snort-conf", dest="snort_conf", metavar="<snort.conf>",
        help="attempt to load classifications and map files based on the "
        "location of the snort.conf")
    parser.add_argument("--directory", metavar="<spool directory>",
                        help="spool directory (eg: /var/log/snort)")
    parser.add_argument("--prefix", metavar="<spool file prefix>",
                        help="spool filename prefix (eg: unified2.log)")
    parser.add_argument("--bookmark", metavar="<filename>",
                        help="enable bookmarking")
    parser.add_argument("--follow", action="store_true", default=False,
                        help="follow files/continuous mode (spool mode only)")
    parser.add_argument("--cs", metavar="<cybersift ip>",
                        help="Specify the CyberSift Server IP Address")
    parser.add_argument("--delete", action="store_true", default=False,
                        help="delete spool files")
    parser.add_argument("--output", metavar="<filename>",
                        help="output filename (eg: /var/log/snort/alerts.json")
    parser.add_argument("--stdout", action="store_true", default=False,
                        help="also log to stdout if --output is a file")
    parser.add_argument(
        "--sort-keys", dest="sort_keys", action="store_true", default=False,
        help="the output of dictionaries will be sorted by key")
    parser.add_argument("--verbose", action="store_true", default=False,
                        help="be more verbose")
    parser.add_argument("filenames", nargs="*")
    args = parser.parse_args()

    if args.verbose:
        LOG.setLevel(logging.DEBUG)

    if args.snort_conf:
        load_from_snort_conf(args.snort_conf, classmap, msgmap)

    # The Elasticsearch target is mandatory.
    if args.cs:
        elastic_ip = args.cs
        es = Elasticsearch(
            ["http://" + elastic_ip + ":80/cybersift_elasticsearch/"],
            timeout=600)
    else:
        LOG.error("Cannot proceed without a valid CyberSift IP")
        sys.exit(1)

    if args.classification_path:
        classmap.load_from_file(
            open(os.path.expanduser(args.classification_path)))
    if args.genmsgmap_path:
        msgmap.load_generator_map(open(os.path.expanduser(
            args.genmsgmap_path)))
    if args.sidmsgmap_path:
        msgmap.load_signature_map(open(os.path.expanduser(
            args.sidmsgmap_path)))

    if msgmap.size() == 0:
        LOG.warning("No alert message map entries loaded.")
    else:
        LOG.info("Loaded %s rule message map entries.", msgmap.size())
    if classmap.size() == 0:
        LOG.warning("No classifications loaded.")
    else:
        LOG.info("Loaded %s classifications.", classmap.size())

    # Default to stdout unless writing to a file; --stdout duplicates
    # output to stdout as well.
    outputs = []
    if args.output:
        outputs.append(OutputWrapper(args.output))
        if args.stdout:
            outputs.append(OutputWrapper("-", sys.stdout))
    else:
        outputs.append(OutputWrapper("-", sys.stdout))

    bookmark = None
    if args.filenames:
        # Spool-only options are invalid in file mode.
        if args.bookmark:
            LOG.error("Bookmarking not valid in file mode.")
            return 1
        if args.follow:
            LOG.error("Follow not valid in file mode.")
            return 1
        if args.delete:
            LOG.error("Delete not valid in file mode.")
            return 1
        reader = unified2.FileRecordReader(*args.filenames)
    elif args.directory and args.prefix:
        if args.bookmark:
            # Key the bookmark file to the running snort pid so a snort
            # restart starts a fresh bookmark. check_output() returns
            # bytes; decode before building the filename.
            current_snort_pid = check_output(
                ["pgrep", "-u", "snort"]).decode().strip()
            bookmark = unified2.Unified2Bookmark(
                filename=args.bookmark + '_' + current_snort_pid)
            init_filename, init_offset = bookmark.get()
        else:
            init_filename = None
            init_offset = None
        reader = unified2.SpoolRecordReader(
            directory=args.directory,
            prefix=args.prefix,
            follow=args.follow,
            rollover_hook=rollover_hook if args.delete else None,
            init_filename=init_filename,
            init_offset=init_offset)
    else:
        LOG.error("No spool or files provided.")
        return 1

    formatter = Formatter(msgmap=msgmap, classmap=classmap)

    count = 0
    record = True
    try:
        while record is not None:
            record = reader.next()
            if record is not None:
                try:
                    as_json = formatter.format(record)
                    if 'event' in as_json:
                        create_snort_module_alert(as_json, es)
                    count += 1
                except Exception as err:
                    LOG.error("Failed to encode record as JSON: %s: %s" % (
                        str(err), str(record)))
                # Persist the read position after each record.
                if bookmark:
                    filename, offset = reader.tell()
                    bookmark.update(filename, offset)
    except unified2.UnknownRecordType as err:
        # Help diagnose non-unified2 input when nothing decoded yet.
        if count == 0:
            LOG.error("%s: Is this a unified2 file?" % (err))
        else:
            LOG.error(err)
def main():
    """Read unified2 records from files or a spool directory, format each
    record as JSON and write it to the configured outputs.

    Returns 1 on invalid option combinations or missing input.

    Fix: Logger.warn() is a deprecated alias of warning() (removed in
    Python 3.13); replaced with warning().
    """
    msgmap = maps.SignatureMap()
    classmap = maps.ClassificationMap()

    parser = argparse.ArgumentParser(
        fromfile_prefix_chars='@', epilog=epilog)
    parser.add_argument(
        "-C", dest="classification_path", metavar="<classification.config>",
        help="path to classification config")
    parser.add_argument(
        "-S", dest="sidmsgmap_path", metavar="<msg-msg.map>",
        help="path to sid-msg.map")
    parser.add_argument(
        "-G", dest="genmsgmap_path", metavar="<gen-msg.map>",
        help="path to gen-msg.map")
    parser.add_argument(
        "--snort-conf", dest="snort_conf", metavar="<snort.conf>",
        help="attempt to load classifications and map files based on the "
        "location of the snort.conf")
    parser.add_argument(
        "--directory", metavar="<spool directory>",
        help="spool directory (eg: /var/log/snort)")
    parser.add_argument(
        "--prefix", metavar="<spool file prefix>",
        help="spool filename prefix (eg: unified2.log)")
    parser.add_argument(
        "--bookmark", metavar="<filename>",
        help="enable bookmarking")
    parser.add_argument(
        "--follow", action="store_true", default=False,
        help="follow files/continuous mode (spool mode only)")
    parser.add_argument(
        "--delete", action="store_true", default=False,
        help="delete spool files")
    parser.add_argument(
        "--output", metavar="<filename>",
        help="output filename (eg: /var/log/snort/alerts.json")
    parser.add_argument(
        "--stdout", action="store_true", default=False,
        help="also log to stdout if --output is a file")
    parser.add_argument(
        "--verbose", action="store_true", default=False,
        help="be more verbose")
    parser.add_argument(
        "filenames", nargs="*")
    args = parser.parse_args()

    if args.verbose:
        LOG.setLevel(logging.DEBUG)

    if args.snort_conf:
        load_from_snort_conf(args.snort_conf, classmap, msgmap)
    if args.classification_path:
        classmap.load_from_file(
            open(os.path.expanduser(args.classification_path)))
    if args.genmsgmap_path:
        msgmap.load_generator_map(open(os.path.expanduser(args.genmsgmap_path)))
    if args.sidmsgmap_path:
        msgmap.load_signature_map(open(os.path.expanduser(args.sidmsgmap_path)))

    if msgmap.size() == 0:
        LOG.warning("No alert message map entries loaded.")
    else:
        LOG.info("Loaded %s rule message map entries.", msgmap.size())
    if classmap.size() == 0:
        LOG.warning("No classifications loaded.")
    else:
        LOG.info("Loaded %s classifications.", classmap.size())

    # Default to stdout unless writing to a file; --stdout duplicates
    # output to stdout as well.
    outputs = []
    if args.output:
        outputs.append(OutputWrapper(args.output))
        if args.stdout:
            outputs.append(OutputWrapper("-", sys.stdout))
    else:
        outputs.append(OutputWrapper("-", sys.stdout))

    bookmark = None
    if args.filenames:
        # Spool-only options are invalid in file mode.
        if args.bookmark:
            LOG.error("Bookmarking not valid in file mode.")
            return 1
        if args.follow:
            LOG.error("Follow not valid in file mode.")
            return 1
        if args.delete:
            LOG.error("Delete not valid in file mode.")
            return 1
        reader = unified2.FileRecordReader(*args.filenames)
    elif args.directory and args.prefix:
        if args.bookmark:
            bookmark = unified2.Unified2Bookmark(filename=args.bookmark)
            init_filename, init_offset = bookmark.get()
        else:
            init_filename = None
            init_offset = None
        reader = unified2.SpoolRecordReader(
            directory=args.directory,
            prefix=args.prefix,
            follow=args.follow,
            rollover_hook=rollover_hook if args.delete else None,
            init_filename=init_filename,
            init_offset=init_offset)
    else:
        LOG.error("No spool or files provided.")
        return 1

    formatter = Formatter(msgmap=msgmap, classmap=classmap)

    count = 0
    try:
        for record in reader:
            try:
                as_json = json.dumps(formatter.format(record))
                for out in outputs:
                    out.write(as_json)
                count += 1
            except Exception as err:
                LOG.error("Failed to encode record as JSON: %s: %s" % (
                    str(err), str(record)))
            # Persist the read position after each record.
            if bookmark:
                filename, offset = reader.tell()
                bookmark.update(filename, offset)
    except unified2.UnknownRecordType as err:
        # Help diagnose non-unified2 input when nothing decoded yet.
        if count == 0:
            LOG.error("%s: Is this a unified2 file?" % (err))
        else:
            LOG.error(err)
def test_multi_file_iteration(self):
    """Reading the same file twice through one FileRecordReader yields
    both copies' records (17 per file), then EOF.

    Fix: assertEquals is a deprecated unittest alias (removed in
    Python 3.12); use assertEqual.
    """
    reader = unified2.FileRecordReader(
        self.test_filename, self.test_filename)
    records = list(reader)
    self.assertEqual(34, len(records))
    # The reader is exhausted after iteration.
    self.assertEqual(None, reader.next())
def create_producer():
    """Read unified2 records from a spool file, assemble Event +
    ExtraData + Packet/Buffer groups, and publish them as JSON messages
    to the 'SNORT' Kafka topic.

    Fixes: `count` was referenced in the except handler but never
    defined (NameError); `extra_data` and `aux` could be unbound if a
    Packet or ExtraData record arrived before the first Event. All
    three are now initialized before the loop.
    """
    producer = KafkaProducer(bootstrap_servers='mods-kafka-new:9092')

    # NOTE(review): spool path is hard-coded; the real-time reader
    # (read_realtime("/var/log/snort/")) is disabled for now.
    read_file = "32_unified2.log"
    print(read_file)
    reader = unified2.FileRecordReader(read_file)

    record_prev = None   # the most recent Event record
    http_uri = None
    extra_data = None    # [http_uri, http_hostname] for the current event
    aux = 0              # toggles between uri (0) and hostname (1) extra data
    count = 0            # records successfully published
    for index, record in enumerate(reader):
        try:
            if isinstance(record, unified2.Event):
                # New event: reset per-event state.
                extra_data = None
                record_prev = record
                aux = 0
                continue
            elif isinstance(record, unified2.ExtraData):
                # Extra data arrives in uri/hostname pairs.
                if aux == 0:
                    http_uri = record["httpdata"]
                    aux = 1
                else:
                    http_hostname = record["httpdata"]
                    extra_data = [http_uri, http_hostname]
                    http_uri = record["httpdata"]
                    aux = 0
                continue
            elif isinstance(record, unified2.Packet) or isinstance(
                    record, unified2.Buffer):
                record_json = dumps(
                    format_json(record, record_prev, extraData=extra_data))
            else:
                # e.g. ExtraDataHdr -- no record needed for the analysis.
                continue

            # Convert the JSON string back to a dict and publish each
            # field value individually.
            rjson = loads(record_json)
            for key, value in rjson.items():
                print(value)
                producer.send('SNORT', value=dumps(value).encode('utf-8'))
            count += 1

            # Safety stop for long runs.
            if index > 10000 and record_prev.get("event-id") == 0:
                break
            time.sleep(0.01)
        except unified2.UnknownRecordType as err:
            if count == 0:
                LOG.error("%s: Is this a unified2 file?" % (err))
            else:
                LOG.error(err)
def test_single_file_iteration(self):
    """A single unified2 file yields exactly 17 records, then EOF."""
    reader = unified2.FileRecordReader(self.test_filename)
    collected = [rec for rec in reader]
    self.assertEqual(17, len(collected))
    # Reader is exhausted once iteration completes.
    self.assertEqual(None, reader.next())
from idstools import unified2
import glob

# Count unified2 records in the first spool file found under log/.
filename = glob.glob('log/unified2.log.*')[0]
reader = unified2.FileRecordReader(filename)

totalRecords = 0
totalEvents = 0
totalPackets = 0
totalUDPprot = 0
totalTCPprot = 0
totalICMPprot = 0
totalUDP = 0
totalTCP = 0
totalICMP = 0

for record in reader:
    if type(record) == unified2.Event:
        totalEvents += 1
        # Tally by IP protocol number (1=ICMP, 6=TCP, 17=UDP).
        proto = record['protocol']
        if proto == 1:
            totalICMPprot += 1
        elif proto == 6:
            totalTCPprot += 1
        elif proto == 17:
            totalUDPprot += 1
        # Tally by local test rule signature id.
        sid = record['signature-id']
        if sid == 1000001:
            totalTCP += 1
        elif sid == 1000002:
            totalUDP += 1
        elif sid == 1000003:
            totalICMP += 1
def main():
    """Read unified2 records from files or a spool directory and write
    each record as a JSON document to the configured outputs.

    Fix: Logger.warn() is a deprecated alias of warning() (removed in
    Python 3.13); replaced with warning().
    """
    msgmap = maps.SignatureMap()
    classmap = maps.ClassificationMap()

    parser = argparse.ArgumentParser(
        fromfile_prefix_chars='@', epilog=epilog)
    parser.add_argument(
        "-C", dest="classification_path", metavar="<classification.config>",
        help="path to classification config")
    parser.add_argument(
        "-S", dest="sidmsgmap_path", metavar="<msg-msg.map>",
        help="path to sid-msg.map")
    parser.add_argument(
        "-G", dest="genmsgmap_path", metavar="<gen-msg.map>",
        help="path to gen-msg.map")
    parser.add_argument(
        "--snort-conf", dest="snort_conf", metavar="<snort.conf>",
        help="attempt to load classifications and map files based on the "
        "location of the snort.conf")
    parser.add_argument(
        "--directory", metavar="<spool directory>",
        help="spool directory (eg: /var/log/snort)")
    parser.add_argument(
        "--prefix", metavar="<spool file prefix>",
        help="spool filename prefix (eg: unified2.log)")
    parser.add_argument(
        "--bookmark", action="store_true", default=False,
        help="enable bookmarking")
    parser.add_argument(
        "--follow", action="store_true", default=False,
        help="follow files/continuous mode (spool mode only)")
    parser.add_argument(
        "--delete", action="store_true", default=False,
        help="delete spool files")
    parser.add_argument(
        "--output", metavar="<filename>",
        help="output filename (eg: /var/log/snort/alerts.json")
    parser.add_argument(
        "--stdout", action="store_true", default=False,
        help="also log to stdout if --output is a file")
    parser.add_argument(
        "filenames", nargs="*")
    args = parser.parse_args()

    if args.snort_conf:
        load_from_snort_conf(args.snort_conf, classmap, msgmap)
    if args.classification_path:
        classmap.load_from_file(
            open(os.path.expanduser(args.classification_path)))
    if args.genmsgmap_path:
        msgmap.load_generator_map(open(os.path.expanduser(args.genmsgmap_path)))
    if args.sidmsgmap_path:
        msgmap.load_signature_map(open(os.path.expanduser(args.sidmsgmap_path)))

    if msgmap.size() == 0:
        LOG.warning("WARNING: No alert message map entries loaded.")
    else:
        LOG.info("Loaded %s rule message map entries.", msgmap.size())
    if classmap.size() == 0:
        LOG.warning("WARNING: No classifications loaded.")
    else:
        LOG.info("Loaded %s classifications.", classmap.size())

    # Default to stdout unless writing to a file; --stdout duplicates
    # output to stdout as well.
    outputs = []
    if args.output:
        outputs.append(OutputWrapper(args.output))
        if args.stdout:
            outputs.append(OutputWrapper("-", sys.stdout))
    else:
        outputs.append(OutputWrapper("-", sys.stdout))

    if args.directory and args.prefix:
        reader = unified2.SpoolRecordReader(
            directory=args.directory,
            prefix=args.prefix,
            follow=args.follow,
            delete=args.delete,
            bookmark=args.bookmark)
    elif args.filenames:
        reader = unified2.FileRecordReader(*args.filenames)
    else:
        print("nothing to do.")
        return

    formatter = Formatter(msgmap=msgmap, classmap=classmap)
    for record in reader:
        as_json = json.dumps(formatter.format(record))
        for out in outputs:
            out.write(as_json)
def run(args):
    """Load Snort unified2 event/packet records from args.filename into a
    SQL database and print summary counters.

    Python 2 code (print statements, list-returning map()).

    :param args: parsed arguments providing URI (database connection
        string), filename (list of unified2 files) and count (optional
        maximum number of events to process, or None).
    """
    engine = sql.create_engine(args.URI)
    conn = engine.connect()
    # Summary counters printed at the end of the run.
    totalRecords = 0
    totalEvents = 0
    totalPackets = 0
    totalUDPprot = 0
    totalTCPprot = 0
    totalICMPprot = 0
    totalUDP = 0
    totalTCP = 0
    totalICMP = 0
    totalOther = 0
    # The number of packets to collect before sending to the database.
    # This is an optimization!
    maxPackets = 10000
    packetList = []
    for filename in args.filename:
        # Stop once the requested number of events has been processed.
        if (not args.count is None) and totalEvents > args.count:
            break
        reader = unified2.FileRecordReader(filename)
        fileRecords = 0
        for record in reader:
            totalRecords += 1
            fileRecords += 1
            # NOTE(review): this increments for every record, not only
            # Event records -- confirm that is the intended semantics.
            totalEvents += 1
            if (not args.count is None) and totalEvents > args.count:
                break
            # Note, this is a tricky little conditional. We don't know it
            # is an Event until after the test of the type, so the `and`
            # short-circuits before accessing 'source-ip.raw' (which
            # would raise for non-event records). The length of
            # 'source-ip.raw' tells us whether this is an IPv4 event;
            # we are not set up (yet!) to process IPv6 packets.
            if type(record) == unified2.Event and len(
                    record['source-ip.raw']) == 4:
                # Event timestamp with microsecond resolution.
                ts = record['event-second'] + \
                    record['event-microsecond'] / 1000000.
                packetTime = datetime.datetime.utcfromtimestamp(ts)
                sourceIP = map(int, record['source-ip'].split('.'))
                destIP = map(int, record['destination-ip'].split('.'))
                # After the event is the packet that set it off.
                packet = reader.next()
                totalRecords += 1
                if type(packet) == unified2.Packet:
                    totalPackets += 1
                    data = packet['data']
                else:
                    data = []
                packetList.append({
                    'time': packetTime,
                    'sourceIP1': sourceIP[0],
                    'sourceIP2': sourceIP[1],
                    'sourceIP3': sourceIP[2],
                    'sourceIP4': sourceIP[3],
                    'sourcePort': record['sport-itype'],
                    'destinationIP1': destIP[0],
                    'destinationIP2': destIP[1],
                    'destinationIP3': destIP[2],
                    'destinationIP4': destIP[3],
                    'destinationPort': record['dport-icode'],
                    'signatureID': record['signature-id'],
                    'packet': data})
                # We only submit packets every once in a while to make
                # the code more efficient.
                if len(packetList) > maxPackets:
                    print 'inserting %d packets into database' % len(
                        packetList)
                    conn.execute(databaseSetup.packets.insert(), packetList)
                    packetList = []
                # Per-protocol counters (1=ICMP, 6=TCP, 17=UDP).
                if record['protocol'] == 1:
                    totalICMPprot += 1
                elif record['protocol'] == 6:
                    totalTCPprot += 1
                elif record['protocol'] == 17:
                    totalUDPprot += 1
                # Per-signature counters for the local test rules.
                if record['signature-id'] == 1000001:
                    totalTCP += 1
                elif record['signature-id'] == 1000002:
                    totalUDP += 1
                elif record['signature-id'] == 1000003:
                    totalICMP += 1
            else:
                print record
                totalOther += 1
        # Submit the last few packets in this file.
        if len(packetList) > 0:
            print 'inserting %d packets into database' % len(packetList)
            conn.execute(databaseSetup.packets.insert(), packetList)
            packetList = []
        print 'filename', filename
        print 'fileRecords', fileRecords
    conn.close()
    print 'totalEvents', totalEvents
    print 'totalRecords', totalRecords
    print 'totalPackets', totalPackets
    print 'totalOther', totalOther
    print 'totalICMPprot', totalICMPprot
    print 'totalTCPprot', totalTCPprot
    print 'totalUDPprot', totalUDPprot
    print 'totalICMP', totalICMP
    print 'totalTCP', totalTCP
    print 'totalUDP', totalUDP