Code Example #1
import logging
import time

import manoward
from colorama import Fore, Style


def archive_collections(CONFIG, age=90):

    logger = logging.getLogger("collection_archive")

    # Parse my General Configuration
    if isinstance(CONFIG, dict):
        config_items = CONFIG
    elif isinstance(CONFIG, str):
        config_items = manoward.get_manoward(explicit_config=CONFIG)
    else:
        raise TypeError("No Configuration Given.")

    db_conn = manoward.get_conn(
        config_items, prefix="store_", tojq=".database", ac_def=True)

    cur = db_conn.cursor()

    archive_ts = int(time.time())

    logger.debug("Archive ts: {}".format(archive_ts))

    populate_archive_sql = '''REPLACE INTO collection_archive
                                SELECT * FROM collection
                                WHERE
                                last_update < FROM_UNIXTIME(%s) -  interval %s DAY ; '''

    remove_overachieving_sql = '''DELETE FROM collection
                                    WHERE last_update < FROM_UNIXTIME(%s) - interval %s DAY ; '''

    archive_args = [archive_ts, age]

    copy_action = manoward.run_query(cur,
                                     populate_archive_sql,
                                     args=archive_args,
                                     require_results=False,
                                     do_abort=False)

    if copy_action["has_error"] is True:
        logger.error("{}Had an Error When Running Archive. Ignoring Delete{}".format(
            Fore.RED, Style.RESET_ALL))
    else:
        # Run Delete
        logger.info("Archive Worked Swimmingly. Let's Go Ahead and Delete.")

        delete_action = manoward.run_query(cur,
                                           remove_overachieving_sql,
                                           args=archive_args,
                                           require_results=False,
                                           do_abort=False)

        if delete_action["has_error"] is True:
            logger.error("{}Error when deleting the Excess.{}".format(
                Fore.RED, Style.RESET_ALL))
        else:
            logger.info("{}Collection Table Archived {}".format(
                Fore.GREEN, Style.RESET_ALL))
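
A minimal usage sketch (the config path is hypothetical; per the isinstance checks above, a pre-parsed dict works as well):

if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    archive_collections("/etc/manowar/manoward.yaml", age=90)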
Code Example #2
File: collate.py Project: chalbersma/manowar
    def get_current_table_data(db_config, table):
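        # Nested helper inside collate.py; logger comes from the enclosing scope.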

        db_conn = manoward.get_conn(db_config,
                                    prefix="analyze_",
                                    tojq=".database",
                                    ac_def=True)

        cur = db_conn.cursor()

        grab_current_table_list = list()
        grab_current_table_list.append(
            "SELECT {0}_id, {0}_text, fk_audits_id, {0}_passed, {0}_failed, {0}_exempt "
            .format(table))
        grab_current_table_list.append("FROM audits_by_{0}".format(table))
        grab_current_table_list.append(
            "WHERE {0}_last_audit >= now() - INTERVAL %s SECOND".format(table))
        grab_current_table_list.append(
            "group by {0}_text, fk_audits_id".format(table))

        grab_ctl_args = [db_config["collate"]["freshseconds"]]

        grab_current_table_query = " ".join(grab_current_table_list)

        # print(grab_current_table_query)
        try:
            cur.execute(grab_current_table_query, grab_ctl_args)
        except Exception as grab_ctl_query_error:
            logger.error(
                "Unable to grab current table list for table {} : {}".format(
                    table, grab_ctl_query_error))
            query_results_list = list()
        else:
            if cur.rowcount:
                query_results_list = cur.fetchall()
            else:
                query_results_list = list()
        finally:
            cur.close()

        return query_results_list
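
For reference, a sketch of the assembled query with table="acoll", reconstructed from the format strings above (the %s remains a bound parameter for freshseconds):

assembled_query = (
    "SELECT acoll_id, acoll_text, fk_audits_id, acoll_passed, acoll_failed, acoll_exempt "
    "FROM audits_by_acoll "
    "WHERE acoll_last_audit >= now() - INTERVAL %s SECOND "
    "group by acoll_text, fk_audits_id"
)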
Code Example #3
File: sapicheck.py Project: chalbersma/manowar
import logging

import pymysql

import manoward
from colorama import Back, Fore, Style


def grab_all_sapi(config_items):

    logger = logging.getLogger("sapicheck")

    all_hosts = list()

    db_conn = manoward.get_conn(
        config_items, prefix="store_", tojq=".database", ac_def=True)
    db_cur = db_conn.cursor(pymysql.cursors.DictCursor)

    all_sapi_hosts_sql = "select hostname from sapiActiveHosts where last_updated >= (now() - INTERVAL 3 DAY) "

    try:
        db_cur.execute(all_sapi_hosts_sql)

        results = db_cur.fetchall()
    except Exception as sapi_query_error:

        logger.warning("{}{}Unable to Grab all SAPI Hosts.{}".format(Back.WHITE,
                                                                     Fore.RED,
                                                                     Style.RESET_ALL))
        logger.debug("Error : {}".format(sapi_query_error))

    else:

        for host in results:
            # Append the hostname to the list
            all_hosts.append(host["hostname"])

    # Return the hosts I've found; if none, this returns an empty list.
    finally:

        db_cur.close()
        db_conn.close()

    return all_hosts
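
A hedged usage sketch (assumes manoward.get_manoward() can locate a default config when called without arguments; the hostname is hypothetical):

config_items = manoward.get_manoward()
if "host001.example.com" in grab_all_sapi(config_items):
    print("Host has checked in with SAPI in the last 3 days.")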
Code Example #4
    def before_request():
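        # Nested Flask request hook; config_items, logger, and FDEBUG come from
        # the enclosing app-factory scope.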
        try:

            g.db = manoward.get_conn(config_items,
                                     prefix="api_",
                                     tojq=".database",
                                     ac_def=True)

            g.logger = logger

            g.debug = FDEBUG

        except Exception as connection_error:
            logger.debug("Connection to DB Error Abandoing Connection Error.")

            return str(connection_error)

        # Endpoint Authorization: a list of endorsements and restrictions that define what you may and may not access
        # For endpoints with fine grained controls

        g.session_endorsements = list()
        g.session_restrictions = list()

        # Open a New Cursor for this Request
        # Get Username
        username = "******"

        try:
            auth_header = request.headers.get("Authorization")
            uname_pass_64 = auth_header.split()[1]
            decoded_uname_pass = base64.b64decode(uname_pass_64).decode(
                "utf-8")
            username = decoded_uname_pass.split(":")[0]

        except Exception as no_auth_error:
            logger.debug(
                "No Authentication token given, either local access or IP whitelist : {}"
                .format(no_auth_error))
            username = "******"
            g.session_endorsements.append(("conntype", "whitelist"))
            g.session_restrictions.append(("conntype", "whitelist"))

            logger.warning("Local Flask or Whitelist IP detected.")

        else:

            # Parsing was successful; add ldap endorsement
            g.session_endorsements.append(("conntype", "ldap"))
            g.session_restrictions.append(("conntype", "ldap"))

        finally:
            logger.debug("User {} Connected a Session.".format(username))
            g.USERNAME = username

        # Robot Authentication
        try:
            robot_header = ast.literal_eval(
                request.headers.get("robotauth", "False"))

            logger.debug("Robot Header : {}".format(robot_header))

            if robot_header is True:
                # I need to do robot auth
                username, apikey = auth_header.split(':')

                g.USERNAME = username

                auth_cursor = g.db.cursor(pymysql.cursors.DictCursor)

                # Integrating Token Types. For now new tokens types will be processed
                # here. In the future this "endorsements" section will be replaced
                # with logic to look at our new centralized authorization system.

                anyvalid = False
                for tokentype in ["robot", "sapi", "ipintel"]:

                    key_valid = validate_key(username=username,
                                             giventoken=apikey,
                                             dbcur=auth_cursor,
                                             tokentype=tokentype)

                    logger.debug("{} token valid: {}".format(tokentype, key_valid))

                    if key_valid is True:
                        # Add Robots Endorsement and Restriction
                        g.session_endorsements.append(("conntype", tokentype))
                        g.session_restrictions.append(("conntype", tokentype))
                        anyvalid = True
                        # Break at the first valid token type
                        break
                if anyvalid is False:
                    # No Valid Token was found
                    logger.warning("Robot Token Out of Date.")
                    abort(403)

                auth_cursor.close()

            else:
                # This isn't a robot call
                pass
        except AttributeError as attribute_error:
            logger.error(
                "Attribute Error parsing Robot Items. Killing. {}".format(
                    attribute_error))
            abort(403)
        except SyntaxError as syntax_error:
            logger.error(
                "Syntax Error parsing Robot Items. Killing. {}".format(
                    syntax_error))
            abort(500)
        finally:
            pass

        # Default Endorsement
        g.session_endorsements.append(("username", "{}".format(g.USERNAME)))

        # DEFAULT Fresh. Use this instead of "Fresh" Values to allow for query caching.
        NOW = int(time.time())
        seconds_after_midnight = NOW % 86400
        MIDNIGHT = NOW - seconds_after_midnight
        oldest = MIDNIGHT - (86400 * 2)

        g.NOW = NOW
        g.MIDNIGHT = MIDNIGHT
        g.twoDayTimestamp = oldest
        g.cur = g.db.cursor(pymysql.cursors.DictCursor)
        g.HTTPENDPOINT = config_items["webserver"]["accesslink"]
        g.config_items = config_items

        logger.debug("Current Session Endorsements : {}".format(
            g.session_endorsements))
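
The username parsing above is standard HTTP Basic auth decoding. The same steps in isolation (credentials hypothetical):

import base64

auth_header = "Basic " + base64.b64encode(b"alice:s3cret").decode("utf-8")
uname_pass_64 = auth_header.split()[1]
decoded_uname_pass = base64.b64decode(uname_pass_64).decode("utf-8")
username = decoded_uname_pass.split(":")[0]
assert username == "alice"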
Code Example #5
File: collate.py Project: chalbersma/manowar
    def grab_single_collated(db_config, result_enum, type_to_grab):

        logger = logging.getLogger("grab_single_collated")

        db_conn = manoward.get_conn(db_config,
                                    prefix="analyze_",
                                    tojq=".database",
                                    ac_def=True)

        cur = db_conn.cursor()

        # Check Result Enum
        if result_enum not in ["pass", "fail", "notafflicted"]:
            raise Exception(
                "Result Enum not in pass/fail/notafflicted. Instead it's : ",
                str(result_enum))

        if type_to_grab not in ["acoll", "pop", "srvtype"]:
            raise Exception(
                "Type to Grab Unknown. Not in acoll, pop, srvtype instead it's : ",
                str(type_to_grab))

        if type_to_grab == "acoll":
            grouper = "audits.audit_name"
            table = "audits_by_acoll"
        elif type_to_grab == "pop":
            grouper = "hosts.pop"
            table = "audits_by_pop"
        elif type_to_grab == "srvtype":
            grouper = "hosts.srvtype"
            table = "audits_by_srvtype"

        grab_single_collated_query_list = list()
        grab_single_collated_query_list.append(
            "SELECT " + grouper +
            ", fk_audits_id, count(DISTINCT fk_host_id) ")
        grab_single_collated_query_list.append("FROM audits_by_host ")
        grab_single_collated_query_list.append(
            "join hosts on fk_host_id =  hosts.host_id ")
        grab_single_collated_query_list.append(
            "join audits on fk_audits_id =  audits.audit_id ")
        grab_single_collated_query_list.append("WHERE")
        grab_single_collated_query_list.append("audit_result = %s ")
        grab_single_collated_query_list.append(
            "and last_audit >= FROM_UNIXTIME(%s) ")
        grab_single_collated_query_list.append("group by " + grouper +
                                               ", fk_audits_id")

        grab_scq_args = [result_enum, twodaytimestamp]

        grab_single_collated_query = " ".join(grab_single_collated_query_list)

        # print(grab_single_collated_query)

        try:
            gsqq_debug = cur.mogrify(grab_single_collated_query, grab_scq_args)
            logger.debug("GSQQ Debug : {}".format(gsqq_debug))
            cur.execute(grab_single_collated_query, grab_scq_args)
        except Exception as gscq_query_error:
            logger.error(
                "Unable to Grab GSCQ Error for Group : {} on type {}".format(
                    grouper, type_to_grab))
            raise gscq_query_error
        else:

            if cur.rowcount:
                query_results_list = cur.fetchall()
            else:
                # No Results
                # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                query_results_list = list()
        finally:
            cur.close()

        return query_results_list
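
cur.mogrify() returns the exact statement execute() would send, with parameters bound, which is what the GSQQ debug line above logs. A sketch with hypothetical values:

# cur.mogrify("SELECT ... WHERE audit_result = %s and last_audit >= FROM_UNIXTIME(%s)",
#             ["fail", 1600000000])
# -> "SELECT ... WHERE audit_result = 'fail' and last_audit >= FROM_UNIXTIME(1600000000)"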
Code Example #6
File: collate.py Project: chalbersma/manowar
    def compare_update_insert_table(collected, currently_on_disk, table,
                                    db_config):

        logger = logging.getLogger("compare_update_insert_table")

        updates = 0
        inserts = 0

        db_conn = manoward.get_conn(db_config,
                                    prefix="analyze_",
                                    tojq=".database",
                                    ac_def=True)

        COLLATE_TIME = int(time())

        cur = db_conn.cursor()

        collected_index = list()

        # Hydrate Collected Items
        for index in collected.keys():

            for this_audit_id in collected[index].keys():

                collected_index.append((index, this_audit_id))

                # Hydrate this
                if "pass" not in collected[index][this_audit_id].keys():
                    collected[index][this_audit_id]["pass"] = 0

                if "fail" not in collected[index][this_audit_id].keys():
                    collected[index][this_audit_id]["fail"] = 0

                if "notafflicted" not in collected[index][this_audit_id].keys(
                ):
                    collected[index][this_audit_id]["notafflicted"] = 0

        #print("Hydrated Collections", collected)
        #print("Collected Index", collected_index)

        # Compare my stuff (get a list of update hosts and insert hosts)
        #print("Current Amounts", currently_on_disk)

        # IDs to update in SQL: rows whose collected counts match what's on
        # disk and only need their last_audit timestamp refreshed
        update_ids = [
            item[0] for item in currently_on_disk
            if item[1] in collected.keys() and item[2] in collected[item[1]].
            keys() and collected[item[1]][item[2]]["pass"] == item[3]
            and collected[item[1]][item[2]]["fail"] == item[4]
            and collected[item[1]][item[2]]["notafflicted"] == item[5]
        ]

        # Items to Insert
        # Part of What's Needed
        current_text_location = dict()

        for i in range(0, len(currently_on_disk)):

            #print(currently_on_disk[i][1], currently_on_disk[i][2])

            if currently_on_disk[i][1] not in current_text_location.keys():
                current_text_location[currently_on_disk[i][1]] = dict()

            if currently_on_disk[i][2] not in current_text_location[
                    currently_on_disk[i][1]].keys():
                current_text_location[currently_on_disk[i][1]][
                    currently_on_disk[i][2]] = i

        #print("On Disk Lookups", current_text_location)

        insert_list = list()
        insert_list = [
            [
                item[0], item[1], COLLATE_TIME, COLLATE_TIME,
                collected[item[0]][item[1]]["pass"],
                collected[item[0]][item[1]]["fail"],
                collected[item[0]][item[1]]["notafflicted"]
            ] for item in collected_index
            if item[0] not in current_text_location.keys()
            or item[1] not in current_text_location[item[0]].keys() or
            (collected[item[0]][item[1]]["pass"],
             collected[item[0]][item[1]]["fail"],
             collected[item[0]][item[1]]["notafflicted"]) !=
            (currently_on_disk[current_text_location[item[0]][item[1]]][3],
             currently_on_disk[current_text_location[item[0]][item[1]]][4],
             currently_on_disk[current_text_location[item[0]][item[1]]][5])
        ]

        logger.debug("Update IDs : {}".format(update_ids))
        logger.debug("Insert List : {}".format(insert_list))

        try:
            if len(update_ids) > 0:
                #update_ids_string = ",".join(map(str, update_ids))
                # Update IDs will now be used as a query parameterization list
                update_ids_parameters = [" %s " for x in update_ids]
                update_ids_string = ",".join(map(str, update_ids_parameters))
                update_query_parameters = [str(COLLATE_TIME)]
                update_query_parameters.extend(update_ids)
                # Query has been parameterized
                update_query = "UPDATE "
                update_query = update_query + "audits_by_" + table + " SET " + table +\
                    "_last_audit = FROM_UNIXTIME( %s ) where " +\
                    table +\
                    "_id in ( " + update_ids_string + " ) "

                try:
                    update_query_debug = cur.mogrify(update_query,
                                                     update_query_parameters)

                    logger.info(
                        "Running Update Query for table : {}".format(table))
                    logger.debug("{}".format(table, update_query_debug))

                    cur.execute(update_query, update_query_parameters)
                except Exception as update_query_error:
                    logger.error("Error updating hosts for {} : {}".format(
                        table, update_query_error))
                else:
                    updates += len(update_ids)

        except Exception as update_error:
            logger.error("Error doing Updates. {}".format(update_error))

        # Inserts

        try:
            if len(insert_list) > 0:

                # Only do this if there's stuff to insert.
                insert_query = []
                # This query is properly parameterized and the table value is
                # hardcoded earlier in the program, so it's nosec'd.
                insert_query.append(
                    "INSERT into audits_by_{0} ( {0}_text, ".format(
                        table))  # nosec
                insert_query.append(
                    "fk_audits_id, {0}_initial_audit, {0}_last_audit, {0}_passed, {0}_failed, {0}_exempt ) "
                    .format(table))
                insert_query.append(
                    "VALUES( %s, %s, FROM_UNIXTIME(%s), FROM_UNIXTIME(%s), %s, %s, %s ) "
                )

                insert_query_string = " ".join(insert_query)

                try:
                    cur.executemany(insert_query_string, insert_list)
                except Exception as insert_query_error:
                    logger.error("Error doing Inserts for {} : {}".format(
                        table, insert_error))
                else:
                    inserts += len(insert_list)

        except Exception as insert_error:
            logger.error("Error doing Inserts : {}".format(insert_error))

        return updates, inserts
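
The update branch above keeps the IN clause parameterized by emitting one %s per id rather than interpolating the ids. That technique in isolation (table name, ids, and timestamp hypothetical):

update_ids = [3, 17, 42]
placeholders = ",".join([" %s "] * len(update_ids))
update_query = ("UPDATE audits_by_acoll SET acoll_last_audit = FROM_UNIXTIME( %s ) "
                "where acoll_id in ( " + placeholders + " )")
update_params = [1600000000] + update_ids
# cur.execute(update_query, update_params)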
Code Example #7
import json
import logging
from time import time

import manoward
from colorama import Fore, Style

# insert_update_host, insert_update_collections, and store_as_SAPI_host are
# project-local helpers defined alongside this function in storage.py.


def storage(config_items, hostdata, sapi=False):
    '''
    Does a Storage of an Object.
    '''

    logger = logging.getLogger("storage.py")

    STORAGE_TIME = int(time())
    storage_stats = dict()

    MAX = config_items["storage"]["collectionmaxchars"]

    storage_stats = dict()
    storage_stats["storage_timestamp"] = STORAGE_TIME

    db_conn = manoward.get_conn(config_items,
                                prefix="store_",
                                tojq=".database",
                                ac_def=False)

    try:
        try:
            host_id = insert_update_host(hostdata, db_conn)
        except Exception as insert_update_host_error:
            logger.error("{}Unable to Update Host with Error : {}{}".format(
                Fore.RED, insert_update_host_error, Style.RESET_ALL))

            raise insert_update_host_error
        else:
            logger.info(host_id)

        # Unique Data FTM
        hostname = hostdata["collection_hostname"]

        storage_stats["collection_timestamp"] = hostdata[
            "collection_timestamp"]

        try:
            storage_stats["inserts"], storage_stats["updates"], storage_stats[
                "errors"] = insert_update_collections(db_conn, host_id,
                                                      hostdata, MAX)
        except Exception as insert_update_collections_error:
            logger.error(
                "{}Unable to Update Collections associated with {}{}".format(
                    Fore.RED, hostname, Style.RESET_ALL))
            logger.debug("Error : {}".format(insert_update_collections_error))

            raise insert_update_collections_error

    except Exception as dbconnection_error:
        logger.error("{}Error Updating Host Collecitons {}{}".format(
            Fore.RED, dbconnection_error, Style.RESET_ALL))

        storage_stats["insert_update"] = 0
        storage_stats["errors"] = 1
    else:
        logger.info("{}Updating Collection Success{}{}".format(
            Fore.GREEN, storage_stats, Style.RESET_ALL))

        # Updating the collection succeeded. Let's check if this is a sapi host.
        if sapi is True:
            # It is, so update the sapi table
            storage_stats["sapi_data"] = store_as_SAPI_host(host_id=host_id,
                                                            db_conn=db_conn,
                                                            hostname=hostname)

        do_ipintel = config_items["ip_intel"].get("do_intel", False)

        logger.debug("Doing IP Intel. ({} Statement).".format(do_ipintel))

        if do_ipintel is True and "ip_intel" in hostdata.keys():
            # Process the IP Intelligence for this host
            result = manoward.process_ip_intel(
                config_dict=config_items,
                multireport=hostdata["ip_intel"],
                host_id=host_id)
            bad_results = [res for res in result if res not in (200, 202)]
            if len(bad_results) == 0:
                logger.info("{}IP Intel : {} for host {}{}".format(
                    Fore.GREEN, result, hostname, Style.RESET_ALL))
            else:
                logger.error("{}IP Intel : {} for host {}{}".format(
                    Fore.RED, result, hostname, Style.RESET_ALL))

    try:
        db_conn.commit()
        db_conn.close()
    except Exception as close_error:
        logger.error("{}Error Closing DB Connection : {}{}".format(
            Fore.RED, close_error, Style.RESET_ALL))

    if __name__ == "__main__":
        print(json.dumps(storage_stats, sort_keys=True, indent=4))

    return storage_stats
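
A hedged usage sketch (hostdata's full schema belongs to the collector; only the keys this function reads directly are noted):

# stats = storage(config_items, hostdata, sapi=False)
# hostdata must carry at least "collection_hostname" and "collection_timestamp",
# optionally "ip_intel", plus the payload consumed by insert_update_collections().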
Code Example #8
import logging
import multiprocessing
import re
import sys
import threading
from copy import deepcopy
from queue import Queue
from time import sleep, time

import pymysql

import manoward
import audittools

# generic_large_compare, subtype_large_compare, and generic_large_analysis_store
# are project-local helpers from the manowar codebase.


def analyze(CONFIGDIR, CONFIG, newer=-1):
    '''
    The Main driver for the analyze process. Will load and process analyze items.
    '''

    logger = logging.getLogger("analyze.py:analyze")

    #ANALYZE_TIME = int(time())

    # Parse my General Configuration
    if isinstance(CONFIG, dict):
        config_items = CONFIG
    elif isinstance(CONFIG, str):
        config_items = manoward.get_manoward(explicit_config=CONFIG)
    else:
        raise TypeError("No Configuration Given.")

    logger.debug("Configuration Items: {}".format(config_items))

    if isinstance(CONFIGDIR, dict):
        logger.debug("CONFIGDIR is given from external process.")
        audits = CONFIGDIR
    elif isinstance(CONFIGDIR, list):
        # Grab all my audit files in CONFIGDIR
        auditfiles = audittools.walk_auditd_dir(CONFIGDIR)

        # Read all my Audits
        audits = dict()
        for auditfile in auditfiles:

            these_audits = audittools.load_auditfile(auditfile)

            for found_audit_name in these_audits.keys():
                if found_audit_name in audits.keys():
                    logger.warning(
                        "Duplicate definition for {} found. Ignoring definition in file {}"
                        .format(found_audit_name, auditfile))
                else:
                    # Add that audit
                    audits[found_audit_name] = these_audits[found_audit_name]

    def grab_host_list(db_conn, FRESH=172800):
        # Grab a Host List

        logger = logging.getLogger("grab_host_list")

        db_cur = db_conn.cursor(pymysql.cursors.DictCursor)

        host_query = '''select host_id, pop, srvtype, last_update
                        from hosts
                        where last_update >= now() - INTERVAL %s SECOND'''

        host_query_args = [FRESH]

        try:
            host_list_debug_query = db_cur.mogrify(host_query, host_query_args)
            logger.debug("hostslist Query : {}".format(host_list_debug_query))
            db_cur.execute(host_query, host_query_args)
        except Exception as hlq_error:
            logger.error("Unable to Query for Hostslist.")
            logger.debug("Error : {}".format(hlq_error))
            all_hosts = list()
            amount_of_hosts = 0
            host_good = False
        else:
            all_hosts = db_cur.fetchall()

            amount_of_hosts = len(all_hosts)

            host_good = bool(amount_of_hosts > 0)

        finally:
            db_cur.close()

        return host_good, amount_of_hosts, all_hosts

    def analyze_one_audit(db_config_items, list_of_hosts, oneAudit, auditName,
                          return_dict, audit_id):

        # Note that db_config_items is the same as config_items

        logger = logging.getLogger("analyze_one_audit")

        logger.debug("Attempting to Analyze Audit {}/{}".format(
            auditName, audit_id))

        try:
            # I multithread like a boss now. :) JK But I need to give each audit its own conn to the DB:
            db_conn = manoward.get_conn(db_config_items,
                                        prefix="analyze_",
                                        tojq=".database",
                                        ac_def=True)

            #
            host_buckets = dict()
            host_comparison = dict()

            # New Results Variables for Future Reference
            pop_results = dict()
            srvtype_results = dict()
            audit_results = dict()

            # Add Insert_Update Counters
            audit_host_inserts = 0
            audit_host_updates = 0

            # Debug Host No More

            # Create Bucket Objects from Config
            for bucket in oneAudit["filters"]:
                host_buckets[bucket] = []
                host_comparison[bucket] = []

            # Hosts left to bucket (initially, all of them)
            items_left_to_bucket = list_of_hosts

            logger.debug(
                "{} Preparing to Analyze {} Hosts with {} Buckets".format(
                    auditName, len(items_left_to_bucket),
                    len(oneAudit["filters"].keys())))

            for bucket in oneAudit["filters"]:

                logger.debug("{} Processing Bucket {}".format(
                    auditName, bucket))

                this_mtype = oneAudit["filters"][bucket]["filter-match"]
                this_ctype = oneAudit["filters"][bucket][
                    "filter-collection-type"]
                this_csubtype = oneAudit["filters"][bucket][
                    "filter-collection-subtype"]
                this_mvalue = oneAudit["filters"][bucket]["filter-match-value"]

                #print(this_mtype, this_ctype, this_csubtype, this_mvalue)
                try:
                    bucket_results = generic_large_compare(
                        db_conn,
                        items_left_to_bucket,
                        this_mtype,
                        this_ctype,
                        this_csubtype,
                        this_mvalue,
                        FRESH,
                        exemptfail=True,
                        audit_name=auditName)
                except Exception as glc_bucket_results_error:
                    logger.error(
                        "Error on Generic Large Compare on bucket {} : audit {}"
                        .format(bucket, auditName))
                    logger.warning(
                        "Maybe no Hosts for Bucket {} on audit {}".format(
                            bucket, auditName))
                    logger.debug("Error : {}".format(glc_bucket_results_error))
                else:
                    # Grab just the items that passed
                    if bucket == "bionic-bucket":
                        logger.info(
                            "{} Bionic Bucket Filter Results {}".format(
                                auditName, bucket_results))

                    for result in bucket_results:
                        if "pfe" in result.keys():
                            if result["pfe"] == "pass":
                                # Remove pfe & pfevalue from this host so it can be checked again
                                try:
                                    del result["pfe"]
                                    del result["pfevalue"]
                                    # Add my stripped result to the host bucket.
                                    # print(result)
                                    host_buckets[bucket].append(result)
                                except Exception as e:
                                    print("Error adding host to host buckets",
                                          e)

                    # Make an index of just the host ids
                    this_bucket_ids = [
                        gotem["host_id"] for gotem in host_buckets[bucket]
                    ]

                    # Grab just the items that haven't been bucketed yet (so I don't have to compare everything, every time)
                    items_left_to_bucket = [
                        host_id for host_id in list_of_hosts
                        if host_id not in this_bucket_ids
                    ]

            # Host Bucketing
            for comparison in host_buckets.keys():
                # print(comparison)

                if len(host_buckets[comparison]) > 0:
                    try:
                        this_mtype = oneAudit["comparisons"][comparison][
                            "comparison-match"]
                        this_ctype = oneAudit["comparisons"][comparison][
                            "comparison-collection-type"]
                        this_csubtype = oneAudit["comparisons"][comparison][
                            "comparison-collection-subtype"]
                        this_mvalue = oneAudit["comparisons"][comparison][
                            "comparison-match-value"]
                        #print(this_mtype, this_ctype, this_csubtype, this_mvalue)
                    except Exception as comparison_error:
                        logger.error(
                            "Error grabbing comparisons for audit {} : {}".
                            format(auditName, comparison_error))
                    else:
                        # Check What Type
                        logger.debug(
                            "{} Running Comparison on Bucket {}/{}".format(
                                auditName, comparison,
                                len(host_buckets[comparison])))

                        if this_mtype in [
                                "subnonhere", "suballhere", "subknowall"
                        ]:
                            # Add Massive Subtype
                            try:
                                comparison_results = subtype_large_compare(
                                    db_conn, host_buckets[comparison],
                                    this_mtype, this_ctype, this_csubtype,
                                    this_mvalue, FRESH)
                            except Exception as subtype_large_compare_error:
                                logger.error(
                                    "{} Error on Subtype Large Compare on Comparison for bucket {}"
                                    .format(auditName, comparison))

                                logger.debug("Error : {}".format(
                                    subtype_large_compare_error))
                            else:
                                host_comparison[
                                    comparison] = comparison_results

                        else:
                            # Generic Comparison
                            try:
                                comparison_results = generic_large_compare(
                                    db_conn, host_buckets[comparison],
                                    this_mtype, this_ctype, this_csubtype,
                                    this_mvalue, FRESH)
                                # print(comparison_results)
                            except Exception as generic_large_compare_error:
                                logger.error(
                                    "{} Error on Generic Large Compare on Comparison for bucket {}"
                                    .format(auditName, comparison))

                                logger.debug("Error : {}".format(
                                    generic_large_compare_error))
                            else:
                                host_comparison[
                                    comparison] = comparison_results
                else:
                    # Possible future: log a "nothing to compare for {} bucket on audit {}" message here
                    pass

            #bucket in host_bucket
            #print(auditName, " Results : ", host_comparison)
            logger.debug(host_comparison)
            logger.info(host_comparison)
            massinserts = 0
            massupdates = 0
            massinserts, massupdates = generic_large_analysis_store(
                db_conn, audit_id, host_comparison, FRESH)

            # Return Dict is a manager.dict() so the "above" process knows what changes here.
            return_dict["host_inserts"] = massinserts
            return_dict["host_updates"] = massupdates

        except Exception as analyze_error:
            logger.error("Error doing analyze for {} : {}".format(
                auditName, analyze_error))
            logger.debug(analyze_error)
            sys.exit(1)
        else:
            sys.exit(0)

    def dequeue_hosts(db_config_items, list_of_hosts):

        logger = logging.getLogger("analyze:dequeue_hosts")

        while True:
            # Pull Stats Stuff
            # Pull Enqueued Host
            global audit_host_inserts
            global audit_host_updates
            #global srvtype_results
            #global pop_results
            #global audit_results

            # print(audit_results)

            try:
                oneAudit, auditName = audit_queue.get()
            except Exception as audit_get_error:
                logger.error("Failure to Pull Items off of Queue.")
                logger.debug("Error : {}".format(audit_get_error))

                audit_queue.task_done()

                # Abnormal Return
                return

            try:
                manager = multiprocessing.Manager()
            except Exception as multiprocess_error:
                logger.error(
                    "Failure to Create Manager for audit {} with error {}".
                    format(auditName, multiprocess_error))
                audit_queue.task_done()

                # Abnormal Return
                return
            else:
                return_dict = manager.dict()

            # Insert Update the Audit in the Database
            try:
                audit_id = insert_update_audit(db_config_items, oneAudit)
            except Exception as update_audit_db_error:
                logger.error(
                    "Failure to Create Audit {} in DB with error {}".format(
                        auditName, update_audit_db_error))
                audit_queue.task_done()
                return
            else:
                oneAudit["audit_id"] = audit_id
                logger.debug(
                    "Stored a Record about audit {}/{} in the database.".
                    format(auditName, audit_id))

            #print("Pulled Host ", this_one_host_array)
            # Process one audit: pass it the host array and the config array
            try:
                #analyze_one_audit(db_config_items, list_of_hosts, oneAudit, auditName, return_dict, audit_id)
                # analyze_audit_process is a new instance for every new thread we make.
                try:
                    analyze_audit_process = multiprocessing.Process(
                        target=analyze_one_audit,
                        args=(db_config_items, list_of_hosts, oneAudit,
                              auditName, return_dict, audit_id))
                    analyze_audit_process.name = auditName
                    analyze_audit_process.daemon = True
                    analyze_audit_process.start()
                except Exception as analyze_pid_error:
                    logger.error("Error with Analyze Audit {} : {}".format(
                        auditName, analyze_pid_error))

                    analyze_audit_process.terminate()
                else:

                    while analyze_audit_process.is_alive():
                        logger.debug("Waiting for: {} {}".format(
                            auditName,
                            analyze_audit_process.is_alive()))

                        # Wait 45 seconds before asking again
                        sleep(45)

                    analyze_audit_process.join()

            except Exception as audit_analyisis_error:
                logger.error("Failure to Analyze Audit {} : {}".format(
                    auditName, audit_analyisis_error))

            # I/U Stats only Thing Left
            logger.debug(return_dict)
            try:
                with audit_host_counts_lock:
                    logger.info("{} I:{} U:{}".format(
                        auditName, return_dict["host_inserts"],
                        return_dict["host_updates"]))

                    # This is a Global
                    audit_host_inserts += return_dict["host_inserts"]
                    audit_host_updates += return_dict["host_updates"]

            except Exception as metrics_error:
                # print(return_dict)
                logger.error(
                    "Failure on Audit when Recording Metrics {} : {}".format(
                        auditName, metrics_error))
            audit_queue.task_done()

        return

    def analyze_all_audits(db_config_items, list_of_hosts, FRESH, MAXTHREADS):
        # Audits are a global variable
        logger = logging.getLogger("analyze_all_audits")

        logger.debug("Apparantly I'm not using fresh {}".format(FRESH))

        # Copy Time
        results_host = deepcopy(list_of_hosts)

        # Create My ThreadPool
        for x in range(MAXTHREADS):
            # This is the magic. It calls dequeue_hosts.
            t = threading.Thread(target=dequeue_hosts,
                                 args=(db_config_items, list_of_hosts))
            # Make Threads Die if Parent is Killed
            t.daemon = True
            # Start my Threads
            t.start()

        # Counter for Time Spent
        start = time()

        for audit in audits:
            # Populate Audit Queue
            logger.info("About to Queue audit {}".format(audit))
            # try:
            this_queue_item = [audits[audit], audit]
            audit_queue.put(this_queue_item)

            # Sleep to allow for better placement
            sleep(1)

        # If you're running verbosely, print out this stuff; else not.
        while audit_queue.unfinished_tasks > 0:

            nowtime = time() - start

            logger.debug("---------------------------------------")
            logger.debug("AuditsLeft : {}".format(
                audit_queue.unfinished_tasks))
            logger.debug("QSize : {}".format(audit_queue.qsize()))
            logger.debug("Thread : {}".format(threading.active_count()))
            logger.debug("QStuff : {}".format(audit_queue.empty()))
            logger.debug("Time : {}".format(nowtime))
            logger.debug("---------------------------------------")
            # Give me an update every 15 seconds
            sleep(15)

        # When I'm not verbose, just wait quietly.
        # Otherwise, once only a few unfinished tasks remain, move on and join.
        audit_queue.join()

        jobtime = time() - start

        return audit_host_inserts, audit_host_updates, jobtime

    def insert_update_audit(db_config_items, audit):

        #logger = logging.getLogger("analyze:insert_update_audit")

        # Literals
        this_audit_name = audit["vuln-name"]
        this_audit_short = audit["vuln-short-description"]
        this_audit_long_description = re.sub(
            r'[\'|\;]', r'', audit["vuln-long-description"][:511])
        this_audit_primary_link = audit["vuln-primary-link"]
        this_audit_secondary_links = audit["vuln-additional-links"]
        this_audit_filters = audit["filters"]
        this_audit_comparison = audit["comparisons"]
        this_audit_filename = audit["filename"]
        this_audit_priority = audit.get("vuln-priority", 5)

        db_conn = manoward.get_conn(db_config_items,
                                    prefix="analyze_",
                                    tojq=".database",
                                    ac_def=True)

        cur = db_conn.cursor()

        # Always Match by audit_name
        grab_audit = "SELECT audit_id from audits where audit_name = %s ; "
        grab_audit_paramaters = [str(this_audit_name)]

        cur.execute(grab_audit, grab_audit_paramaters)

        if cur.rowcount:
            # There's Data
            this_audit_id = cur.fetchone()[0]
            have_audit_id = True
        else:
            have_audit_id = False

        replace_query_args = list()

        # Always Specified Columns
        columns = """audit_name, audit_short_description, audit_long_description,
                    audit_primary_link, audit_secondary_links, audit_filters,
                    audit_comparison, audit_priority, filename """

        dynamic_column_items = list()
        dynamic_column_args = list()

        # print(this_audit_secondary_links)

        for secondary_link in this_audit_secondary_links:
            # print(secondary_link)
            dynamic_column_items.append(" %s , %s ")

            # Add to Args List
            dynamic_column_args.append(str(secondary_link))
            dynamic_column_args.append(
                str(this_audit_secondary_links[secondary_link]))

        # Join the %s pairs and wrap them in MariaDB's COLUMN_CREATE()
        dynamic_column = " , ".join(dynamic_column_items)
        dynamic_column = "COLUMN_CREATE(" + dynamic_column + ")"

        # Values String
        this_audit_values = " %s , %s , %s , %s , " + dynamic_column + ", %s " + \
            " , %s , %s , %s "

        this_audit_value_paramaters = [
            str(this_audit_name),
            str(this_audit_short)[:63],
            str(this_audit_long_description),
            str(this_audit_primary_link)
        ]
        this_audit_value_paramaters.extend(dynamic_column_args)

        temp_list = [
            str(this_audit_filters).replace('\'', '"')[:511],
            str(this_audit_comparison).replace('\'', '"')[:511],
            str(this_audit_priority),
            str(this_audit_filename)
        ]

        this_audit_value_paramaters.extend(temp_list)

        query_head = "REPLACE into audits ( "
        query_mid = " ) VALUES ( "

        query_tail = " ) "

        if have_audit_id:
            # Have Audit ID so add it, otherwise these won't be added
            columns = columns + " , audit_id "
            this_audit_values = this_audit_values + ", %s "
            this_audit_value_paramaters.append(str(this_audit_id))

        query_string = query_head + columns + query_mid + this_audit_values + query_tail

        # This is a REPLACE and will update the audit no matter what.
        debug_sql = cur.mogrify(query_string, this_audit_value_paramaters)
        cur.execute(query_string, this_audit_value_paramaters)

        this_row = cur.lastrowid

        cur.close()
        return this_row

    # Globals
    global pop_results
    global srvtype_results
    global audit_results
    # Inserts Updates
    global audit_host_inserts
    global audit_host_updates

    # Results dictionaries
    pop_results = dict()
    #pop_results_lock = threading.Lock()
    srvtype_results = dict()
    #srvtype_results_lock = threading.Lock()
    audit_results = dict()
    #audit_results_lock = threading.Lock()

    audit_host_inserts = 0
    audit_host_updates = 0
    audit_host_counts_lock = threading.Lock()

    # Config Items
    #MAX = config_items["storage"]["collectionmaxchars"]
    FRESH = config_items["analyze"]["freshseconds"]
    MAXTHREADS = int(config_items["analyze"]["maxthreads"])

    # Create A Queue
    audit_queue = Queue()

    try:
        db_conn = manoward.get_conn(config_items,
                                    prefix="analyze_",
                                    tojq=".database",
                                    ac_def=True)
        dbmessage = "Connected"
    except Exception as db_conn_error:
        dbmessage = "Unable to Connect"
        logger.debug("DB Connection Error : {}".format(db_conn_error))
        # Without a DB connection the host grab below would fail anyway; surface the error.
        raise
    finally:
        # Start my analyze_stats with data
        analyze_stats = {"db-status": dbmessage}

    # Grab Hosts List (Still Single Threaded)
    host_good, analyze_stats["FreshHosts"], host_list = grab_host_list(
        db_conn, FRESH)

    if host_good:
        logger.info("Successfully Collected {} Hosts as 'Live'".format(
            len(host_list)))

        analyze_stats["HostCollectionStatus"] = "Success"

        analyze_stats["audit_inserts"], analyze_stats[
            "audit_updates"], analyze_stats["jobtime"] = analyze_all_audits(
                config_items, host_list, FRESH, MAXTHREADS)

        analyze_stats["threads"] = str(MAXTHREADS)
        analyze_stats["totalaudits"] = len(audits)

    else:
        analyze_stats["HostCollectionStatus"] = "Failed"

    return analyze_stats
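
A hedged usage sketch (paths hypothetical; per the isinstance checks at the top, CONFIGDIR may also be a pre-loaded dict of audits):

analyze_stats = analyze(["/etc/manowar/audits.d"], "/etc/manowar/manoward.yaml")
print(analyze_stats)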