Example #1
    def test_bookmarking(self):
        """Test that when bookmarking is used, a second invocation of
        the SpoolEventReader picks up where it left off.

        """

        for i in range(2):
            with open("%s/unified2.log.%04d" % (self.tmpdir, i),
                      "ab") as outfile:
                with open(self.test_filename, "rb") as test_file:
                    outfile.write(test_file.read())

        reader = unified2.SpoolEventReader(self.tmpdir,
                                           "unified2",
                                           bookmark=True)

        event = reader.next()
        self.assertIsNotNone(event)
        print(reader.bookmark.get())
        bookmark_filename, bookmark_offset = reader.bookmark.get()
        self.assertEqual(bookmark_filename, "unified2.log.0000")

        # The bookmark offset should be at the end of the first event,
        # even though the offset of the underlying file has moved on.
        self.assertEqual(bookmark_offset, 38950)
        self.assertEqual(reader.reader.tell()[1], 68)

        # Now create a new SpoolEventReader; its underlying offset
        # should match our bookmark location.
        reader = unified2.SpoolEventReader(self.tmpdir,
                                           "unified2",
                                           bookmark=True)
        bookmark_filename, bookmark_offset = reader.bookmark.get()
        underlying_filename, underlying_offset = reader.reader.tell()

        self.assertEqual(bookmark_filename, "unified2.log.0000")
        self.assertEqual(bookmark_offset, 38950)

        self.assertEqual(bookmark_filename,
                         os.path.basename(underlying_filename))
        self.assertEqual(bookmark_offset, underlying_offset)

        # Read the next and final event, and check bookmark location.
        self.assertIsNotNone(reader.next())
        self.assertIsNone(reader.next())
        bookmark_filename, bookmark_offset = reader.bookmark.get()
        self.assertEqual(bookmark_filename, "unified2.log.0001")
        self.assertEqual(bookmark_offset, 38950)

        # As this was the last event, the underlying location should
        # be the same as the bookmark.
        bookmark_filename, bookmark_offset = reader.bookmark.get()
        underlying_filename, underlying_offset = reader.reader.tell()
        self.assertEqual(bookmark_filename,
                         os.path.basename(underlying_filename))
        self.assertEqual(bookmark_offset, underlying_offset)
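
The test above checks that a bookmarked reader resumes from the saved offset. A minimal sketch of how an application might rely on that behaviour across restarts; the directory, prefix and handle_event callback are illustrative assumptions, not taken from the test:

# Sketch: resuming a bookmarked spool reader after a restart.
# "/var/log/snort", "unified2.log" and handle_event() are assumed names.
from idstools import unified2

def drain_spool(handle_event):
    # With bookmark=True the reader persists the offset of the last fully
    # read event, so the next invocation of this function starts there.
    reader = unified2.SpoolEventReader("/var/log/snort",
                                       "unified2.log",
                                       bookmark=True)
    while True:
        event = reader.next()
        if event is None:
            break
        handle_event(event)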
Example #2
    def test_eof(self):
        """ Basic test of the SpoolEventReader aggregrating spool reader. """

        reader = unified2.SpoolEventReader(self.tmpdir, "unified2")
        shutil.copy(
            self.test_filename, "%s/unified2.log.1382627900" % self.tmpdir)
        self.assertTrue(isinstance(reader.next(), unified2.Event))
        self.assertTrue(reader.next() is None)
Example #3
    def test_with_file_rotation(self):

        reader = unified2.SpoolEventReader(self.tmpdir, "unified2")

        for i in range(2):
            open("%s/unified2.log.%04d" % (self.tmpdir, i), "ab").write(
                open(self.test_filename, "rb").read())
            self.assertTrue(isinstance(reader.next(), unified2.Event))

        self.assertTrue(reader.next() is None)
Example #4
    def read_ids_events(self):
        reader = unified2.SpoolEventReader(self.directory,
                                           self.prefix,
                                           follow=True,
                                           bookmark=True,
                                           delete=self.delete_files)

        for alert in reader:
            asyncio.run_coroutine_threadsafe(self.process_alert(alert),
                                             loop=self.loop)
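
Because the reader blocks in follow mode, read_ids_events is presumably run on a worker thread while the asyncio loop runs elsewhere; run_coroutine_threadsafe then hands each alert back to that loop. A minimal sketch of that wiring, in which everything except the reader loop itself is an assumption about the surrounding class:

# Sketch: running the blocking spool reader off the event loop.
# The class name, constructor arguments and process_alert body are assumed.
import asyncio
import threading

from idstools import unified2

class AlertWatcher:
    def __init__(self, directory, prefix, delete_files=False):
        self.directory = directory
        self.prefix = prefix
        self.delete_files = delete_files
        self.loop = asyncio.new_event_loop()

    async def process_alert(self, alert):
        print(alert)  # placeholder handler

    def read_ids_events(self):
        # Same loop as the example above: iterates forever in follow mode
        # and hands each alert to the asyncio loop from this worker thread.
        reader = unified2.SpoolEventReader(self.directory,
                                           self.prefix,
                                           follow=True,
                                           bookmark=True,
                                           delete=self.delete_files)
        for alert in reader:
            asyncio.run_coroutine_threadsafe(self.process_alert(alert),
                                             loop=self.loop)

    def run(self):
        # The reader gets its own daemon thread; the event loop stays in
        # the calling thread.
        threading.Thread(target=self.read_ids_events, daemon=True).start()
        self.loop.run_forever()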
Example #5
    def test_with_growing_file(self):

        reader = unified2.SpoolEventReader(self.tmpdir, "unified2")

        with open("%s/unified2.log.0001" % self.tmpdir, "ab") as log_file:
            with open(self.test_filename, "rb") as test_file:
                log_file.write(test_file.read())
        self.assertTrue(isinstance(reader.next(), unified2.Event))
        self.assertTrue(reader.next() is None)

        with open("%s/unified2.log.0001" % self.tmpdir, "ab") as log_file:
            with open(self.test_filename, "rb") as test_file:
                log_file.write(test_file.read())
        self.assertTrue(isinstance(reader.next(), unified2.Event))
        self.assertTrue(reader.next() is None)
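
As the test shows, without follow=True the reader returns None at the end of the spool and picks up newly appended data on a later call. A simple polling loop built on that behaviour might look like the following sketch; the one-second interval and the handle callback are assumptions:

# Sketch: polling a spool directory with a non-follow reader.
# The 1-second interval and handle() are illustrative assumptions.
import time

from idstools import unified2

def poll_spool(directory, prefix, handle):
    reader = unified2.SpoolEventReader(directory, prefix)
    while True:
        event = reader.next()
        if event is None:
            # Nothing new yet; appended data and rotated files are picked
            # up on the next call (see the tests above).
            time.sleep(1)
            continue
        handle(event)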
Example #6
from idstools import unified2
import re
import json
import string
import random
import memcache

mc = memcache.Client(['127.0.0.1:11211'], debug=1)

reader = unified2.SpoolEventReader("/opt/telepath/suricata/logs/",
                                   "unified2.alert",
                                   follow=True)

rules = {}
with open("/opt/telepath/suricata/suricata.rules") as f:
    content = f.readlines()

for line in content:
    result = re.match(r'.*msg:\"([^\"]+)\".*sid:([^;]+)', line)
    if result:
        rules[result.groups()[1]] = result.groups()[0]

#print rules

for record in reader:

    if ('signature-id' in record and 'destination-ip' in record
            and 'source-ip' in record and 'event-second' in record):

        out = {
            "ts": record['event-second'],
            "rule": rules[str(record['signature-id'])],
Example #7
def main():

    msgmap = maps.SignatureMap()
    classmap = maps.ClassificationMap()

    parser = argparse.ArgumentParser(fromfile_prefix_chars='@')
    parser.add_argument("-C",
                        dest="classification_path",
                        metavar="<classification.config>",
                        help="path to classification config")
    parser.add_argument("-S",
                        dest="sidmsgmap_path",
                        metavar="<msg-msg.map>",
                        help="path to sid-msg.map")
    parser.add_argument("-G",
                        dest="genmsgmap_path",
                        metavar="<gen-msg.map>",
                        help="path to gen-msg.map")
    parser.add_argument(
        "--snort-conf",
        dest="snort_conf",
        metavar="<snort.conf>",
        help="attempt to load classifications and map files based on the "
        "location of the snort.conf")
    parser.add_argument("--directory",
                        metavar="<spool directory>",
                        help="spool directory (eg: /var/log/snort)")
    parser.add_argument("--prefix",
                        metavar="<spool file prefix>",
                        help="spool filename prefix (eg: unified2.log)")
    parser.add_argument("--bookmark",
                        action="store_true",
                        default=False,
                        help="enable bookmarking")
    parser.add_argument("--follow",
                        action="store_true",
                        default=False,
                        help="follow files/continuous mode (spool mode only)")
    parser.add_argument("filenames", nargs="*")
    args = parser.parse_args()

    if args.snort_conf:
        load_from_snort_conf(args.snort_conf, classmap, msgmap)

    if args.classification_path:
        classmap.load_from_file(
            open(os.path.expanduser(args.classification_path)))
    if args.genmsgmap_path:
        msgmap.load_generator_map(open(os.path.expanduser(
            args.genmsgmap_path)))
    if args.sidmsgmap_path:
        msgmap.load_signature_map(open(os.path.expanduser(
            args.sidmsgmap_path)))

    if msgmap.size() == 0:
        LOG.warn("WARNING: No alert message map entries loaded.")
    else:
        LOG.info("Loaded %s rule message map entries.", msgmap.size())

    if classmap.size() == 0:
        LOG.warn("WARNING: No classifications loaded.")
    else:
        LOG.info("Loaded %s classifications.", classmap.size())

    if args.directory and args.prefix:
        reader = unified2.SpoolEventReader(directory=args.directory,
                                           prefix=args.prefix,
                                           follow=args.follow,
                                           bookmark=args.bookmark)

        for event in reader:
            print_event(event, msgmap, classmap)

    elif args.filenames:
        reader = unified2.FileEventReader(*args.filenames)
        for event in reader:
            print_event(event, msgmap, classmap)

    else:
        parser.print_help()
        return 1
Example #8
def main():

    msgmap = maps.SignatureMap()
    classmap = maps.ClassificationMap()

    parser = argparse.ArgumentParser(fromfile_prefix_chars='@', epilog=epilog)
    parser.add_argument("-C",
                        dest="classification_path",
                        metavar="<classification.config>",
                        help="path to classification config")
    parser.add_argument("-S",
                        dest="sidmsgmap_path",
                        metavar="<msg-msg.map>",
                        help="path to sid-msg.map")
    parser.add_argument("-G",
                        dest="genmsgmap_path",
                        metavar="<gen-msg.map>",
                        help="path to gen-msg.map")
    parser.add_argument(
        "--snort-conf",
        dest="snort_conf",
        metavar="<snort.conf>",
        help="attempt to load classifications and map files based on the "
        "location of the snort.conf")
    parser.add_argument("--directory",
                        metavar="<spool directory>",
                        help="spool directory (eg: /var/log/snort)")
    parser.add_argument("--prefix",
                        metavar="<spool file prefix>",
                        help="spool filename prefix (eg: unified2.log)")
    parser.add_argument("--bookmark",
                        action="store_true",
                        default=False,
                        help="enable bookmarking")
    parser.add_argument("--follow",
                        action="store_true",
                        default=False,
                        help="follow files/continuous mode (spool mode only)")
    parser.add_argument("--delete",
                        action="store_true",
                        default=False,
                        help="delete spool files")
    parser.add_argument("--output",
                        metavar="<filename>",
                        help="output filename (eg: /var/log/snort/alerts.json")
    parser.add_argument("--stdout",
                        action="store_true",
                        default=False,
                        help="also log to stdout if --output is a file")
    parser.add_argument("filenames", nargs="*")
    args = parser.parse_args()

    if args.snort_conf:
        load_from_snort_conf(args.snort_conf, classmap, msgmap)

    if args.classification_path:
        classmap.load_from_file(
            open(os.path.expanduser(args.classification_path)))
    if args.genmsgmap_path:
        msgmap.load_generator_map(open(os.path.expanduser(
            args.genmsgmap_path)))
    if args.sidmsgmap_path:
        msgmap.load_signature_map(open(os.path.expanduser(
            args.sidmsgmap_path)))

    if msgmap.size() == 0:
        LOG.warn("WARNING: No alert message map entries loaded.")
    else:
        LOG.info("Loaded %s rule message map entries.", msgmap.size())

    if classmap.size() == 0:
        LOG.warn("WARNING: No classifications loaded.")
    else:
        LOG.info("Loaded %s classifications.", classmap.size())

    eve_filter = EveFilter(msgmap, classmap)

    outputs = []

    if args.output:
        outputs.append(OutputWrapper(args.output))
        if args.stdout:
            outputs.append(OutputWrapper("-", sys.stdout))
    else:
        outputs.append(OutputWrapper("-", sys.stdout))

    if args.directory and args.prefix:
        reader = unified2.SpoolEventReader(directory=args.directory,
                                           prefix=args.prefix,
                                           follow=args.follow,
                                           delete=args.delete,
                                           bookmark=args.bookmark)
    elif args.filenames:
        reader = unified2.FileEventReader(*args.filenames)
    else:
        print("nothing to do.")
        return

    for event in reader:
        try:
            encoded = json.dumps(eve_filter.filter(event))
            for out in outputs:
                out.write(encoded)
        except Exception as err:
            LOG.error("Failed to encode record as JSON: %s: %s" %
                      (str(err), str(event)))
Example #9
# Init GeoIP data for IP details
geo_lite_city = pygeoip.GeoIP('/usr/local/lookups/GeoLiteCity.dat')
geo_ip_asn = pygeoip.GeoIP('/usr/local/lookups/GeoIPASNum.dat')
logging.warning('Loaded latest ASN and City info')

nowtimedom = datetime.datetime.now()
updatedurationdom = datetime.timedelta(minutes=5)
updatetimedom = nowtimedom + updatedurationdom
# FOR ASN and City Info
nowtime = datetime.datetime.now()
updateduration = datetime.timedelta(hours=6)
updatetime = nowtime + updateduration
reader = unified2.SpoolEventReader("/var/log/snort",
                                   "snort.u2.*",
                                   follow=True,
                                   delete=False,
                                   bookmark=True)
httplist = []
max_buffer_size = 1024
nowtime = datetime.datetime.now()
timeduration = datetime.timedelta(seconds=5)
endtime = nowtime + timeduration
dlog = DnifLogger(AsyncHttpConsumer(url, buffer_size=max_buffer_size))
dlog.start()
try:
    for event in reader:
        if datetime.datetime.now() > updatetime:
            try:
                geo_lite_city = pygeoip.GeoIP('GeoLiteCity.dat')
                geo_ip_asn = pygeoip.GeoIP('GeoIPASNum.dat')