Exemplo n.º 1
0
    def test_iteration(self):
        """Iterating a SpoolRecordReader over one spool file yields every
        record in it (the sample log contains 17 records)."""
        test_filename = "tests/multi-record-event.log"

        reader = unified2.SpoolRecordReader(self.tmpdir, "unified2")
        shutil.copy(test_filename, "%s/unified2.log.1382627902" % self.tmpdir)
        # assertEquals is a deprecated alias, removed in Python 3.12.
        self.assertEqual(len(list(reader)), 17)
Exemplo n.º 2
0
    def test_next(self):
        """next() yields one record at a time across spool files and
        returns None whenever no complete record is available."""
        test_filename = "tests/multi-record-event.log"

        reader = unified2.SpoolRecordReader(self.tmpdir, "unified2")
        assert reader.next() is None

        # Copy in a file.
        shutil.copy(test_filename, "%s/unified2.log.1382627900" % self.tmpdir)

        # Should be able to get 17 records.
        for _ in range(17):
            assert reader.next() is not None

        # The next record should be None.
        assert reader.next() is None

        # Copy in another file.
        shutil.copy(test_filename, "%s/unified2.log.1382627901" % self.tmpdir)

        # Should be able to get 17 records.
        for _ in range(17):
            assert reader.next() is not None

        # Copy in 2 more files, should be able to get 34 records
        # sequentially.
        shutil.copy(test_filename, "%s/unified2.log.1382627902" % self.tmpdir)
        shutil.copy(test_filename, "%s/unified2.log.1382627903" % self.tmpdir)
        for _ in range(17 * 2):
            assert reader.next() is not None
Exemplo n.º 3
0
    def test_next_with_unpexted_eof(self):
        """next() returns None on a truncated record, then succeeds once
        the rest of the record has been written (a reader racing a
        writer).  Note: method name typo is preserved for compatibility."""
        with open("tests/multi-record-event.log", "rb") as test_file:
            buf = io.BytesIO(test_file.read())

        reader = unified2.SpoolRecordReader(self.tmpdir, "unified2")

        # Write out a couple bytes and try to read: not enough for a record.
        with open("%s/unified2.log.0001" % self.tmpdir, "wb") as out:
            out.write(buf.read(6))
        assert reader.next() is None

        # Write out the rest of file; a full record is now available.
        with open("%s/unified2.log.0001" % self.tmpdir, "ab") as out:
            out.write(buf.read())
        assert reader.next() is not None
Exemplo n.º 4
0
    def test_get_filenames(self):
        """get_filenames() returns only the files matching the reader's
        prefix, ignoring other files in the spool directory."""
        for suffix in ("0001", "0002", "0003", "0004"):
            shutil.copy(
                "tests/merged.log",
                "%s/unified2.log.%s" % (self.tmpdir, suffix))
        # This one has a different prefix and must not be returned.
        shutil.copy("tests/merged.log", "%s/asdf.log.0004" % (self.tmpdir))

        reader = unified2.SpoolRecordReader(self.tmpdir, "unified2")
        filenames = reader.get_filenames()
        self.assertEqual(len(filenames), 4)
        for filename in filenames:
            self.assertTrue(filename.startswith("unified2.log"))
Exemplo n.º 5
0
    def __init__(self, dd_controller):
        """Set up the IDS attack notifier.

        :param dd_controller: controller used to push attack notifications.
        """
        self.url_attackDected = "config-ids:ids/attack_detected"

        # Map Snort signature IDs (as strings) to attack names.
        self.attack_map = {
            '1000006': "port_scan",
            '1000031': "ping_flood",
        }

        # Continuously tail Snort's unified2 spool.
        self.reader = unified2.SpoolRecordReader(
            "/var/log/snort", "snort.log", follow=True)

        self.ddController = dd_controller
        self.idsParser = IdsParser()
Exemplo n.º 6
0
    def test_with_growing_file(self):
        """Records appended to an already-read spool file are picked up
        by subsequent next() calls (the reader follows file growth)."""
        reader = unified2.SpoolRecordReader(self.tmpdir, "unified2")

        # Append one copy of the sample log: 17 records, then EOF.
        with open("%s/unified2.log.0001" % (self.tmpdir), "ab") as log_file, \
                open(self.test_filename, "rb") as src:
            log_file.write(src.read())
        for _ in range(17):
            self.assertTrue(reader.next() is not None)
        self.assertTrue(reader.next() is None)

        # Append a second copy: 17 more records become available.
        with open("%s/unified2.log.0001" % (self.tmpdir), "ab") as log_file, \
                open(self.test_filename, "rb") as src:
            log_file.write(src.read())
        for _ in range(17):
            self.assertTrue(reader.next() is not None)
        self.assertTrue(reader.next() is None)
Exemplo n.º 7
0
    def test_open_at_bookmark(self):
        """A reader initialized with a bookmark (filename + offset)
        resumes reading from that offset, skipping earlier files."""
        # Create a spool directory with some files...
        shutil.copy(self.test_filename, "%s/unified2.log.0001" % (self.tmpdir))
        shutil.copy(self.test_filename, "%s/unified2.log.0002" % (self.tmpdir))

        # Make the 3rd one a concatenation of itself so we know a valid offset.
        with open("%s/unified2.log.0003" % self.tmpdir, "ab") as log_file, \
                open(self.test_filename, "rb") as src:
            log_file.write(src.read())
        with open("%s/unified2.log.0003" % self.tmpdir, "ab") as log_file, \
                open(self.test_filename, "rb") as src:
            log_file.write(src.read())

        # Now create the reader with a bookmark.
        reader = unified2.SpoolRecordReader(
            self.tmpdir, "unified2.log", "unified2.log.0003", 38950)

        # Only the second half of file 0003 should be read: 17 records.
        # assertEquals is a deprecated alias, removed in Python 3.12.
        self.assertEqual(len(list(reader)), 17)
Exemplo n.º 8
0
    def get_events(cls, path=None, file=None):
        """Read all unified2 Event records from a spool and convert them
        to NetworkEvent objects.

        :param path: spool directory; defaults to CONFIG['DEFAULT_LOG_PATH'].
        :param file: spool filename prefix; defaults to
            CONFIG['DEFAULT_LOG_FILE'].  NOTE(review): shadows the
            Python 2 builtin ``file``; renaming would break keyword callers.
        :return: list of NetworkEvent instances; empty if anything fails.
        """

        if path is None:
            path = cls.CONFIG['DEFAULT_LOG_PATH']

        if file is None:
            file = cls.CONFIG['DEFAULT_LOG_FILE']

        events = []

        try:
            # The signature map translates (generator-id, signature-id)
            # pairs into rule metadata (message, references).
            sigmap = maps.SignatureMap()
            sigmap.load_generator_map(open(cls.CONFIG.get('GEN_MAP_PATH')))
            sigmap.load_signature_map(open(cls.CONFIG.get('SIG_MAP_PATH')))

            reader = unified2.SpoolRecordReader(path, file, follow=False)

            for record in reader:
                # Only Event records are converted; other record types
                # (packets, extra data) are skipped.
                if isinstance(record, unified2.Event):

                    event_details = sigmap.get(record['generator-id'],
                                               record['signature-id'])

                    event = NetworkEvent(cls.__name__)
                    events.append(event)

                    event.src_ip = record['source-ip']
                    event.dest_ip = record['destination-ip']
                    event.protocol = record['protocol']
                    event.src_port = record['sport-itype']
                    event.dest_port = record['dport-icode']
                    # Fall back to the raw SID when the signature is not
                    # in the map.
                    event.signature = event_details[
                        'msg'] if event_details else 'SID: {}'.format(
                            record['signature-id'])
                    if event_details:
                        event.reference = json.dumps(event_details['ref'])

        except Exception as e:
            # Best-effort: surface the failure to the UI and return
            # whatever was collected so far (possibly nothing).
            flash(e.__class__.__name__, "error")

        return events
Exemplo n.º 9
0
    def test_open_next(self):
        """open_next() advances through spool files in order and keeps
        the last file open once the spool is exhausted."""
        reader = unified2.SpoolRecordReader(self.tmpdir, "unified2")
        # No files yet.
        # assertEquals is a deprecated alias, removed in Python 3.12.
        self.assertEqual(None, reader.open_next())

        shutil.copy("tests/merged.log", "%s/unified2.log.0001" % (self.tmpdir))
        shutil.copy("tests/merged.log", "%s/unified2.log.0002" % (self.tmpdir))
        shutil.copy("tests/merged.log", "%s/unified2.log.0003" % (self.tmpdir))
        shutil.copy("tests/merged.log", "%s/unified2.log.0004" % (self.tmpdir))

        next_filename = reader.open_next()
        self.assertEqual("unified2.log.0001", next_filename)
        next_filename = reader.open_next()
        self.assertEqual("unified2.log.0002", next_filename)
        next_filename = reader.open_next()
        self.assertEqual("unified2.log.0003", next_filename)
        next_filename = reader.open_next()
        self.assertEqual("unified2.log.0004", next_filename)

        # Exhausted: no next file, but the current one stays open.
        next_filename = reader.open_next()
        self.assertEqual(None, next_filename)
        self.assertTrue(reader.fileobj is not None)
Exemplo n.º 10
0
def main():
    """Read unified2 records from a spool directory or explicit files,
    aggregate Event/Packet/ExtraData records into complete events, and
    write them through the EVE filter to the configured outputs."""

    msgmap = maps.SignatureMap()
    classmap = maps.ClassificationMap()

    parser = argparse.ArgumentParser(
        fromfile_prefix_chars='@', epilog=epilog)
    parser.add_argument(
        "-C", dest="classification_path", metavar="<classification.config>",
        help="path to classification config")
    parser.add_argument(
        "-S", dest="sidmsgmap_path", metavar="<msg-msg.map>",
        help="path to sid-msg.map")
    parser.add_argument(
        "-G", dest="genmsgmap_path", metavar="<gen-msg.map>",
        help="path to gen-msg.map")
    parser.add_argument(
        "--snort-conf", dest="snort_conf", metavar="<snort.conf>",
        help="attempt to load classifications and map files based on the "
        "location of the snort.conf")
    parser.add_argument(
        "--directory", metavar="<spool directory>",
        help="spool directory (eg: /var/log/snort)")
    parser.add_argument(
        "--prefix", metavar="<spool file prefix>",
        help="spool filename prefix (eg: unified2.log)")
    parser.add_argument(
        "--bookmark", action="store_true", default=False,
        help="enable bookmarking")
    parser.add_argument(
        "--follow", action="store_true", default=False,
        help="follow files/continuous mode (spool mode only)")
    parser.add_argument(
        "--delete", action="store_true", default=False,
        help="delete spool files")
    parser.add_argument(
        "-o", "--output", metavar="<filename>",
        help="output filename (eg: /var/log/snort/alerts.json)")
    parser.add_argument(
        "--stdout", action="store_true", default=False,
        help="also log to stdout if --output is a file")
    parser.add_argument(
        "--packet-printable", action="store_true", default=False,
        help="add packet_printable field to events")
    parser.add_argument(
        "--packet-hex", action="store_true", default=False,
        help="add packet_hex field to events")
    parser.add_argument(
        "filenames", nargs="*")
    args = parser.parse_args()

    if args.snort_conf:
        load_from_snort_conf(args.snort_conf, classmap, msgmap)

    if args.classification_path:
        classmap.load_from_file(
            open(os.path.expanduser(args.classification_path)))
    if args.genmsgmap_path:
        msgmap.load_generator_map(open(os.path.expanduser(args.genmsgmap_path)))
    if args.sidmsgmap_path:
        msgmap.load_signature_map(open(os.path.expanduser(args.sidmsgmap_path)))

    if msgmap.size() == 0:
        LOG.warning("WARNING: No alert message map entries loaded.")
    else:
        LOG.info("Loaded %s rule message map entries.", msgmap.size())

    if classmap.size() == 0:
        LOG.warning("WARNING: No classifications loaded.")
    else:
        LOG.info("Loaded %s classifications.", classmap.size())

    eve_filter = EveFilter(
        msgmap, classmap, packet_printable=args.packet_printable,
        packet_hex=args.packet_hex)

    outputs = []

    if args.output:
        outputs.append(OutputWrapper(args.output))
        if args.stdout:
            outputs.append(OutputWrapper("-", sys.stdout))
    else:
        outputs.append(OutputWrapper("-", sys.stdout))

    writer = Writer(outputs, eve_filter)

    bookmark = None

    if args.directory and args.prefix:
        init_filename, init_offset = None, None
        if args.bookmark:
            bookmark = unified2.Unified2Bookmark(
                args.directory, args.prefix)
            init_filename, init_offset = bookmark.get()
        rollover_handler = RolloverHandler(args.delete)
        reader = unified2.SpoolRecordReader(
            directory=args.directory,
            prefix=args.prefix,
            follow=args.follow,
            init_filename=init_filename,
            init_offset=init_offset,
            rollover_hook=rollover_handler.on_rollover)
    elif args.filenames:
        if args.bookmark:
            LOG.error("Bookmarking not supported in file mode, exiting.")
            return 1
        reader = unified2.FileRecordReader(*args.filenames)
    else:
        print("nothing to do.")
        return

    # Aggregation loop: an Event record opens a new event; following
    # Packet/ExtraData records attach to it.  The event is flushed when
    # the next Event arrives, when input is exhausted, or after one
    # second of idle time (so follow mode doesn't hold the last event
    # forever).
    event = None
    last_record_time = time.time()
    queue = []

    while True:
        flush = False
        record = reader.next()
        done = False
        if not record:
            if event and time.time() - last_record_time > 1.0:
                # Idle timeout: assume the pending event is complete.
                queue.append(event)
                event = None
                flush = True
            else:
                if args.follow:
                    time.sleep(0.01)
                else:
                    if event:
                        queue.append(event)
                    flush = True
                    done = True
        else:

            last_record_time = time.time()

            if isinstance(record, unified2.Event):
                if event is not None:
                    queue.append(event)
                    flush = True
                event = record
            elif isinstance(record, unified2.ExtraData):
                if not event:
                    continue
                event["extra-data"].append(record)
            elif isinstance(record, unified2.Packet):
                if not event:
                    # Orphan packet with no preceding event: emit as-is.
                    queue.append(record)
                    flush = True
                else:
                    if "packet" in event:
                        queue.append(record)
                    else:
                        event["packet"] = record

        if flush:
            for record in queue:
                writer.write(record)
            if args.bookmark and bookmark:
                location = reader.tell()
                bookmark.update(*location)
            queue = []

        if done:
            break
Exemplo n.º 11
0
def main():
    """Read unified2 records (explicit files or a spool directory),
    format them, and create Snort module alerts on a CyberSift
    Elasticsearch server."""

    msgmap = maps.SignatureMap()
    classmap = maps.ClassificationMap()

    parser = argparse.ArgumentParser(fromfile_prefix_chars='@', epilog=epilog)
    parser.add_argument("-C",
                        dest="classification_path",
                        metavar="<classification.config>",
                        help="path to classification config")
    parser.add_argument("-S",
                        dest="sidmsgmap_path",
                        metavar="<msg-msg.map>",
                        help="path to sid-msg.map")
    parser.add_argument("-G",
                        dest="genmsgmap_path",
                        metavar="<gen-msg.map>",
                        help="path to gen-msg.map")
    parser.add_argument(
        "--snort-conf",
        dest="snort_conf",
        metavar="<snort.conf>",
        help="attempt to load classifications and map files based on the "
        "location of the snort.conf")
    parser.add_argument("--directory",
                        metavar="<spool directory>",
                        help="spool directory (eg: /var/log/snort)")
    parser.add_argument("--prefix",
                        metavar="<spool file prefix>",
                        help="spool filename prefix (eg: unified2.log)")
    parser.add_argument("--bookmark",
                        metavar="<filename>",
                        help="enable bookmarking")
    parser.add_argument("--follow",
                        action="store_true",
                        default=False,
                        help="follow files/continuous mode (spool mode only)")
    parser.add_argument("--cs",
                        metavar="<cybersift ip>",
                        help="Specify the CyberSift Server IP Address")
    parser.add_argument("--delete",
                        action="store_true",
                        default=False,
                        help="delete spool files")
    parser.add_argument("--output",
                        metavar="<filename>",
                        help="output filename (eg: /var/log/snort/alerts.json)")
    parser.add_argument("--stdout",
                        action="store_true",
                        default=False,
                        help="also log to stdout if --output is a file")
    parser.add_argument(
        "--sort-keys",
        dest="sort_keys",
        action="store_true",
        default=False,
        help="the output of dictionaries will be sorted by key")
    parser.add_argument("--verbose",
                        action="store_true",
                        default=False,
                        help="be more verbose")
    parser.add_argument("filenames", nargs="*")
    args = parser.parse_args()

    if args.verbose:
        LOG.setLevel(logging.DEBUG)

    if args.snort_conf:
        load_from_snort_conf(args.snort_conf, classmap, msgmap)

    if args.cs:
        elastic_ip = args.cs
        es = Elasticsearch(
            ["http://" + elastic_ip + ":80/cybersift_elasticsearch/"],
            timeout=600)
    else:
        LOG.error("Cannot proceed without a valid CyberSift IP")
        sys.exit(1)

    if args.classification_path:
        classmap.load_from_file(
            open(os.path.expanduser(args.classification_path)))
    if args.genmsgmap_path:
        msgmap.load_generator_map(open(os.path.expanduser(
            args.genmsgmap_path)))
    if args.sidmsgmap_path:
        msgmap.load_signature_map(open(os.path.expanduser(
            args.sidmsgmap_path)))

    # Logger.warn is a deprecated alias of Logger.warning.
    if msgmap.size() == 0:
        LOG.warning("No alert message map entries loaded.")
    else:
        LOG.info("Loaded %s rule message map entries.", msgmap.size())

    if classmap.size() == 0:
        LOG.warning("No classifications loaded.")
    else:
        LOG.info("Loaded %s classifications.", classmap.size())

    outputs = []

    if args.output:
        outputs.append(OutputWrapper(args.output))
        if args.stdout:
            outputs.append(OutputWrapper("-", sys.stdout))
    else:
        outputs.append(OutputWrapper("-", sys.stdout))

    bookmark = None

    if args.filenames:
        if args.bookmark:
            LOG.error("Bookmarking not valid in file mode.")
            return 1
        if args.follow:
            LOG.error("Follow not valid in file mode.")
            return 1
        if args.delete:
            LOG.error("Delete not valid in file mode.")
            return 1
        reader = unified2.FileRecordReader(*args.filenames)
    elif args.directory and args.prefix:
        if args.bookmark:
            # check_output returns bytes on Python 3; decode before use
            # (str() on bytes would produce "b'...'", corrupting the
            # bookmark filename).  NOTE(review): assumes a single snort
            # pid from pgrep -- verify.
            current_snort_pid = check_output(
                ["pgrep", "-u", "snort"]).decode().strip()
            bookmark = unified2.Unified2Bookmark(filename=args.bookmark + '_' +
                                                 current_snort_pid)
            init_filename, init_offset = bookmark.get()
        else:
            init_filename = None
            init_offset = None
        reader = unified2.SpoolRecordReader(
            directory=args.directory,
            prefix=args.prefix,
            follow=args.follow,
            rollover_hook=rollover_hook if args.delete else None,
            init_filename=init_filename,
            init_offset=init_offset)
    else:
        LOG.error("No spool or files provided.")
        return 1

    formatter = Formatter(msgmap=msgmap, classmap=classmap)

    count = 0

    record = True

    try:
        while record is not None:
            record = reader.next()
            if record is not None:
                try:
                    as_json = formatter.format(record)
                    if 'event' in as_json:
                        create_snort_module_alert(as_json, es)
                    count += 1
                except Exception as err:
                    LOG.error("Failed to encode record as JSON: %s: %s" %
                              (str(err), str(record)))
                # Persist progress so a restart resumes where we left off.
                if bookmark:
                    filename, offset = reader.tell()
                    bookmark.update(filename, offset)
    except unified2.UnknownRecordType as err:
        if count == 0:
            LOG.error("%s: Is this a unified2 file?" % (err))
        else:
            LOG.error(err)
Exemplo n.º 12
0
def main():
    """Read unified2 records (explicit files or a spool directory) and
    write each one as a JSON alert to the configured outputs."""

    msgmap = maps.SignatureMap()
    classmap = maps.ClassificationMap()

    parser = argparse.ArgumentParser(
        fromfile_prefix_chars='@', epilog=epilog)
    parser.add_argument(
        "-C", dest="classification_path", metavar="<classification.config>",
        help="path to classification config")
    parser.add_argument(
        "-S", dest="sidmsgmap_path", metavar="<msg-msg.map>",
        help="path to sid-msg.map")
    parser.add_argument(
        "-G", dest="genmsgmap_path", metavar="<gen-msg.map>",
        help="path to gen-msg.map")
    parser.add_argument(
        "--snort-conf", dest="snort_conf", metavar="<snort.conf>",
        help="attempt to load classifications and map files based on the "
        "location of the snort.conf")
    parser.add_argument(
        "--directory", metavar="<spool directory>",
        help="spool directory (eg: /var/log/snort)")
    parser.add_argument(
        "--prefix", metavar="<spool file prefix>",
        help="spool filename prefix (eg: unified2.log)")
    parser.add_argument(
        "--bookmark", metavar="<filename>", help="enable bookmarking")
    parser.add_argument(
        "--follow", action="store_true", default=False,
        help="follow files/continuous mode (spool mode only)")
    parser.add_argument(
        "--delete", action="store_true", default=False,
        help="delete spool files")
    parser.add_argument(
        "--output", metavar="<filename>",
        help="output filename (eg: /var/log/snort/alerts.json)")
    parser.add_argument(
        "--stdout", action="store_true", default=False,
        help="also log to stdout if --output is a file")
    parser.add_argument(
        "--verbose", action="store_true", default=False,
        help="be more verbose")
    parser.add_argument(
        "filenames", nargs="*")
    args = parser.parse_args()

    if args.verbose:
        LOG.setLevel(logging.DEBUG)

    if args.snort_conf:
        load_from_snort_conf(args.snort_conf, classmap, msgmap)

    if args.classification_path:
        classmap.load_from_file(
            open(os.path.expanduser(args.classification_path)))
    if args.genmsgmap_path:
        msgmap.load_generator_map(open(os.path.expanduser(args.genmsgmap_path)))
    if args.sidmsgmap_path:
        msgmap.load_signature_map(open(os.path.expanduser(args.sidmsgmap_path)))

    # Logger.warn is a deprecated alias of Logger.warning.
    if msgmap.size() == 0:
        LOG.warning("No alert message map entries loaded.")
    else:
        LOG.info("Loaded %s rule message map entries.", msgmap.size())

    if classmap.size() == 0:
        LOG.warning("No classifications loaded.")
    else:
        LOG.info("Loaded %s classifications.", classmap.size())

    outputs = []

    if args.output:
        outputs.append(OutputWrapper(args.output))
        if args.stdout:
            outputs.append(OutputWrapper("-", sys.stdout))
    else:
        outputs.append(OutputWrapper("-", sys.stdout))

    bookmark = None

    # Spool-only options are rejected in file mode.
    if args.filenames:
        if args.bookmark:
            LOG.error("Bookmarking not valid in file mode.")
            return 1
        if args.follow:
            LOG.error("Follow not valid in file mode.")
            return 1
        if args.delete:
            LOG.error("Delete not valid in file mode.")
            return 1
        reader = unified2.FileRecordReader(*args.filenames)
    elif args.directory and args.prefix:
        if args.bookmark:
            bookmark = unified2.Unified2Bookmark(filename=args.bookmark)
            init_filename, init_offset = bookmark.get()
        else:
            init_filename = None
            init_offset = None
        reader = unified2.SpoolRecordReader(
            directory=args.directory,
            prefix=args.prefix,
            follow=args.follow,
            rollover_hook=rollover_hook if args.delete else None,
            init_filename=init_filename,
            init_offset=init_offset)
    else:
        LOG.error("No spool or files provided.")
        return 1

    formatter = Formatter(msgmap=msgmap, classmap=classmap)

    count = 0

    try:
        for record in reader:
            try:
                as_json = json.dumps(formatter.format(record))
                for out in outputs:
                    out.write(as_json)
                count += 1
            except Exception as err:
                LOG.error("Failed to encode record as JSON: %s: %s" % (
                    str(err), str(record)))
            # Persist progress so a restart resumes where we left off.
            if bookmark:
                filename, offset = reader.tell()
                bookmark.update(filename, offset)
    except unified2.UnknownRecordType as err:
        if count == 0:
            LOG.error("%s: Is this a unified2 file?" % (err))
        else:
            LOG.error(err)
Exemplo n.º 13
0
parser = create_parser()
args = parser.parse_args()

# NOTE(review): assert is stripped under `python -O`; raise SystemExit
# instead for real argument validation.
assert os.path.exists(
    args.directory), args.directory + ': no such file or directory'
assert os.path.exists(os.path.dirname(
    args.output)), os.path.dirname(args.output) + ': no such file or directory'

msgmap = maps.SignatureMap()
classmap = maps.ClassificationMap()
formatter = Formatter(msgmap=msgmap, classmap=classmap)

# Tail the spool directory continuously, starting from the beginning.
reader = unified2.SpoolRecordReader(directory=args.directory,
                                    prefix=args.prefix,
                                    follow=True,
                                    init_filename=None,
                                    init_offset=None)

labels = [
    'generator-id', 'signature-id', 'blocked', 'source-ip', 'dport-icode'
]

alert_count = {}
text_collector = {}
last_time = time.time()
for record in reader:
    formatted = dict(formatter.format(record))
    # Skip records (packets, extra data) that carry no event.
    if 'event' not in formatted:
        continue
    event = formatted['event']
Exemplo n.º 14
0
# -*- coding: utf-8 -*-
import os

from idstools import unified2


def rollover_hook(closed, opened):
    """Delete each spool file once the reader rolls over to the next one."""
    os.unlink(closed)


# The path must be a raw string: "\U..." in an ordinary string literal is
# an (invalid) unicode escape and a SyntaxError on Python 3.
reader = unified2.SpoolRecordReader(r"C:\Users\lele\Desktop",
                                    "snort.unified2",
                                    rollover_hook=rollover_hook,
                                    follow=True)
for record in reader:
    print(record)
Exemplo n.º 15
0
# Hostname of the Redis service (docker-compose style service name).
HOST_IP = 'redis'
# PROXY_GRPC = 'proxy-access-control:50054'

logging.basicConfig(filename='alert.log', level=logging.DEBUG)

# Retry until Redis is reachable; on success clear any stale event list.
connect = False
while not connect:
    try:
        r = redis.StrictRedis(host=HOST_IP, port=6379, db=0)
        r.delete('snort_events')
        connect = True
    except Exception as e:
        # Best-effort retry loop: log the failure and try again.
        logging.debug(e)
        connect = False

# Follow Snort's spool directory continuously.  NOTE(review): the empty
# prefix presumably matches every file in the directory -- verify against
# SpoolRecordReader's prefix handling.
reader = unified2.SpoolRecordReader("/var/log/snort", "", follow=True)


def sendGrpcAlert(event_id, redis_key):
    """Forward one alert to the proxy-access-control service over gRPC.

    Errors are logged and swallowed so a transient gRPC failure never
    kills the caller's read loop.

    :param event_id: event identifier sent in the AlertMessage.
    :param redis_key: Redis key sent in the AlertMessage (presumably
        where the event payload is stored -- verify against consumer).
    """
    try:
        channel = grpc.insecure_channel('proxy-access-control:50054')
        stub = nginx_pb2_grpc.ControllerStub(channel)
        stub.ProcessAlerts(
            nginx_pb2.AlertMessage(event_id=event_id, redis_key=redis_key))
    except Exception as e:
        logging.debug(e)


for record in reader:
    try:
        if isinstance(record, unified2.Event):
Exemplo n.º 16
0
def main():
    """Read unified2 records (spool directory or explicit files), format
    each as JSON and write it to the configured outputs."""

    msgmap = maps.SignatureMap()
    classmap = maps.ClassificationMap()

    parser = argparse.ArgumentParser(
        fromfile_prefix_chars='@', epilog=epilog)
    parser.add_argument(
        "-C", dest="classification_path", metavar="<classification.config>",
        help="path to classification config")
    parser.add_argument(
        "-S", dest="sidmsgmap_path", metavar="<msg-msg.map>",
        help="path to sid-msg.map")
    parser.add_argument(
        "-G", dest="genmsgmap_path", metavar="<gen-msg.map>",
        help="path to gen-msg.map")
    parser.add_argument(
        "--snort-conf", dest="snort_conf", metavar="<snort.conf>",
        help="attempt to load classifications and map files based on the "
        "location of the snort.conf")
    parser.add_argument(
        "--directory", metavar="<spool directory>",
        help="spool directory (eg: /var/log/snort)")
    parser.add_argument(
        "--prefix", metavar="<spool file prefix>",
        help="spool filename prefix (eg: unified2.log)")
    parser.add_argument(
        "--bookmark", action="store_true", default=False,
        help="enable bookmarking")
    parser.add_argument(
        "--follow", action="store_true", default=False,
        help="follow files/continuous mode (spool mode only)")
    parser.add_argument(
        "--delete", action="store_true", default=False,
        help="delete spool files")
    parser.add_argument(
        "--output", metavar="<filename>",
        help="output filename (eg: /var/log/snort/alerts.json)")
    parser.add_argument(
        "--stdout", action="store_true", default=False,
        help="also log to stdout if --output is a file")
    parser.add_argument(
        "filenames", nargs="*")
    args = parser.parse_args()

    if args.snort_conf:
        load_from_snort_conf(args.snort_conf, classmap, msgmap)

    if args.classification_path:
        classmap.load_from_file(
            open(os.path.expanduser(args.classification_path)))
    if args.genmsgmap_path:
        msgmap.load_generator_map(open(os.path.expanduser(args.genmsgmap_path)))
    if args.sidmsgmap_path:
        msgmap.load_signature_map(open(os.path.expanduser(args.sidmsgmap_path)))

    # Logger.warn is a deprecated alias of Logger.warning.
    if msgmap.size() == 0:
        LOG.warning("WARNING: No alert message map entries loaded.")
    else:
        LOG.info("Loaded %s rule message map entries.", msgmap.size())

    if classmap.size() == 0:
        LOG.warning("WARNING: No classifications loaded.")
    else:
        LOG.info("Loaded %s classifications.", classmap.size())

    outputs = []

    if args.output:
        outputs.append(OutputWrapper(args.output))
        if args.stdout:
            outputs.append(OutputWrapper("-", sys.stdout))
    else:
        outputs.append(OutputWrapper("-", sys.stdout))

    if args.directory and args.prefix:
        # NOTE(review): delete=/bookmark= kwargs belong to an older
        # idstools SpoolRecordReader API -- verify against the installed
        # version.
        reader = unified2.SpoolRecordReader(
            directory=args.directory,
            prefix=args.prefix,
            follow=args.follow,
            delete=args.delete,
            bookmark=args.bookmark)
    elif args.filenames:
        reader = unified2.FileRecordReader(*args.filenames)
    else:
        print("nothing to do.")
        return

    formatter = Formatter(msgmap=msgmap, classmap=classmap)

    for record in reader:
        as_json = json.dumps(formatter.format(record))
        for out in outputs:
            out.write(as_json)