示例#1
0
def trim(test=False):
    """
    Implementation of a FIFO log window by trimming logs from X days ago from MongoDB
    by deleting old collections
    """

    # for every log type, delete the collection from a certain amount of days ago (in config)
    for log_type in get_log_types():

        try:
            oldest_date_to_keep = int(get_date_x_days_ago(int(get_config(log_type, 'window'))))
            db = connect_db(log_type, local=test, master=True)

            # grab every collection and if it's older than the log window, drop it
            for collection in db.collection_names():
                collection_date = collection[-8:]
                try:
                    if int(collection_date) < int(oldest_date_to_keep):
                        db.drop_collection(collection)
                except:
                    continue

        except Exception as e:
            print str(e)
            continue
示例#2
0
def trim(test=False):
    """
    Implementation of a FIFO log window by trimming logs from X days ago from MongoDB
    by deleting old collections
    """

    # for every log type, delete the collection from a certain amount of days ago (in config)
    for log_type in get_log_types():

        try:
            oldest_date_to_keep = int(
                get_date_x_days_ago(int(get_config(log_type, 'window'))))
            db = connect_db(log_type, local=test, master=True)

            # grab every collection and if it's older than the log window, drop it
            for collection in db.collection_names():
                collection_date = collection[-8:]
                try:
                    if int(collection_date) < int(oldest_date_to_keep):
                        db.drop_collection(collection)
                except:
                    continue

        except Exception as e:
            print str(e)
            continue
示例#3
0
def get_stats():
    """
    Snapshot MongoDB "dbstats" output for every log type into the
    tools/db_stats collection, one document per database.
    """

    for log_type in get_log_types():

        db = connect_db(log_type)
        log_stats = db.command("dbstats")

        # upsert keyed on the db name so repeated runs overwrite the
        # previous snapshot instead of accumulating duplicates
        stats_coll = connect_collection('tools', 'db_stats')
        stats_coll.update({'db': log_type}, log_stats, upsert=True)
示例#4
0
 def tests(self):
     """
     Run every dumper in test mode and sanity-check the resulting local
     collection: document count is a multiple of 5 and each test key has
     at least one distinct value.
     """
     for case in self.test_list:
         case["dumper"].dump(test=True)
         names = connect_db(case["logtype"], local=True).collection_names()
         coll = None
         names.remove("system.indexes")
         try:
             coll = connect_collection(case["logtype"], names[0], local=True)
             self.assertEqual(coll.count() % 5, 0)
             for test in self.test_keys:
                 # BUGFIX: was "is not 0", an identity check against an int
                 # literal that is always True for a list -- the assertion
                 # could never fire.  Assert the distinct list is non-empty.
                 assert coll.distinct(test)
         except Exception as e:
             # report the failing log type but keep checking the rest
             print case["logtype"] + " had an error"
             print str(e)
             pass
示例#5
0
def print_last_inserts():
    """ return time of last insert for each logtype """

    for log_type in get_log_types():

        newest_coll_name = connect_db(log_type, local=False).collection_names()[-1]
        coll = connect_collection(log_type, newest_coll_name, local=False)

        for document in coll.find().sort([('time', DESCENDING)]).limit(1):
            line = document['line'].split(' ')
            try:
                line.remove('')
            except:
                pass
            print log_type + '\t\t' + str(line[0:3])
示例#6
0
    def setUp(self):
        # Fixture data for the dumper tests: known log types, their dumper
        # modules, sample queries, and the document keys to sanity-check.
        self.dbs = ["cca", "csacs", "dhcp", "greylist", "header_reject", "named", "nat", "pat", "resnet", "vpn"]
        self.dumpers = [
            cca_dump,
            csacs_dump,
            dhcp_dump,
            greylist_dump,
            header_reject_dump,
            named_dump,
            nat_dump,
            pat_dump,
            resnet_dump,
            vpn_dump,
        ]
        self.queries = [
            {"username": "******"},
            {"mac": "aabbccddeeff"},
            {"recipient": "*****@*****.**"},
            {"from_addr": "*****@*****.**"},
            {"zone": "osupachyderm.org"},
            {"time": 1319029072},
            {"port": "60940"},
            {"username": "******"},
        ]
        self.test_keys = ["username", "mac", "recipient", "from_addr", "zone", "time", "port", "ip"]
        # one (dumper, logtype) pair per log type under test
        self.test_list = [
            {"dumper": cca_dump, "logtype": "cca"},
            {"dumper": csacs_dump, "logtype": "csacs"},
            {"dumper": dhcp_dump, "logtype": "dhcp"},
            {"dumper": greylist_dump, "logtype": "greylist"},
            {"dumper": header_reject_dump, "logtype": "header_reject"},
            {"dumper": named_dump, "logtype": "named"},
            {"dumper": nat_dump, "logtype": "nat"},
            {"dumper": pat_dump, "logtype": "pat"},
            {"dumper": resnet_dump, "logtype": "resnet"},
            {"dumper": vpn_dump, "logtype": "vpn"},
        ]

        # NOTE(review): `names` is discarded after this loop -- presumably
        # the connect_db calls are kept for their connection side effects;
        # verify, otherwise this loop is dead code.
        for case in self.test_list:
            db = connect_db(case["logtype"], local=True)
            names = db.collection_names()
            try:
                names.remove("system.indexes")
            except:
                pass
示例#7
0
def compact(test=False):
    """
    Compact (defrag/free) every collection
    """

    for log_type in get_log_types():
        try:
            db = connect_db(log_type, local=test, master=True)

            for coll_name in db.collection_names():
                coll = connect_collection(log_type, coll_name)
                try:
                    print "compacting " + coll_name
                    db.eval("db.runCommand({ compact : '%s'})" % (coll_name))
                except Exception as e:
                    print str(e)
                    continue

        except Exception as e:
            print str(e)
            continue
示例#8
0
    Prints statistics summarizing the frequency of each key
    in a collection of a Mongo database.  Helpful as a
    diagnostic tool.
    """
<<<<<<< Updated upstream

    # count all documents in this collection
    total = collection.count()
    #f or log_type in get_log_types():
=======
#    for log_type in get_log_types():
>>>>>>> Stashed changes
    for log_type in ['aruba']:

        # find all collections in the database
        database = connect_db(log_type, local=True)
        collnames = database.collection_names()
        try:
            collnames.remove('system.indexes')
            collnames.remove('system.users')
        except:
            pass

        # loop over collections names
        for collname in collnames:
            collection = connect_collection(log_type, collname, local=True)
<<<<<<< Updated upstream
>>>>>>> Stashed changes
=======
>>>>>>> Stashed changes